python类set_value()的实例源码

rnn_cell.py 文件源码 项目:yoctol-keras-layer-zoo 作者: Yoctol 项目源码 文件源码 阅读 43 收藏 0 点赞 0 评论 0
def reset_states(self, states=None):
    """Reset the wrapped recurrent layer's states and the extra dense state.

    Args:
        states: optional list of state arrays. All entries but the last
            are forwarded to the wrapped recurrent layer; the last entry
            is the value for the dense state. When None, states reset to
            zeros (the recurrent layer handles its own zeroing).
    """
    if states is None:
        self.recurrent_layer.reset_states(states)
    else:
        # Everything but the trailing array belongs to the recurrent layer.
        self.recurrent_layer.reset_states(states[:-1])

    batch_size = self.recurrent_layer.input_spec[0].shape[0]
    if self.dense_state is None:
        # First reset: allocate the backing variable (zero-filled).
        self.dense_state = K.zeros((
            batch_size,
            self.dense_layer.units
        ))
        if states is not None:
            # Bug fix: previously a caller-supplied dense state was
            # silently dropped when the variable had not been allocated
            # yet; honour it here.
            K.set_value(self.dense_state, states[-1])
    elif states is None:
        K.set_value(
            self.dense_state,
            np.zeros((batch_size, self.dense_layer.units))
        )
    else:
        K.set_value(
            self.dense_state,
            states[-1]
        )
model_wrappers.py 文件源码 项目:importance-sampling 作者: idiap 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def set_lr(self, lr):
    """Set the learning rate of the wrapped models.

    The new rate is pushed to ``self.model`` (mandatory) and to
    ``self.small`` (best effort). A model without an ``optimizer.lr``
    variable raises NotImplementedError for the former and is silently
    ignored for the latter.
    """
    def _push(member):
        # Raises AttributeError when the member or its optimizer.lr is absent.
        K.set_value(member.optimizer.lr, lr)

    try:
        _push(self.model)
    except AttributeError:
        raise NotImplementedError()

    try:
        _push(self.small)
    except AttributeError:
        pass
model_wrappers.py 文件源码 项目:importance-sampling 作者: idiap 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def set_lr(self, lr):
    """Set the learning rate of the wrapped models.

    Pushes ``lr`` to ``self.model`` (required — missing attributes raise
    NotImplementedError) and then to ``self.small`` (optional — missing
    attributes are ignored).
    """
    def _assign(wrapped_model):
        # AttributeError propagates when wrapped_model/optimizer/lr is missing.
        K.set_value(wrapped_model.optimizer.lr, lr)

    try:
        _assign(self.model)
    except AttributeError:
        raise NotImplementedError()

    try:
        _assign(self.small)
    except AttributeError:
        pass
model_wrappers.py 文件源码 项目:importance-sampling 作者: idiap 项目源码 文件源码 阅读 37 收藏 0 点赞 0 评论 0
def set_lr(self, lr):
    """Set the learning rate of the wrapped models.

    ``self.model`` must expose ``optimizer.lr`` (otherwise
    NotImplementedError is raised); ``self.small`` is updated only when
    it exists and exposes the same attribute chain.
    """
    def _set(target):
        # Any missing link in target.optimizer.lr raises AttributeError.
        K.set_value(target.optimizer.lr, lr)

    try:
        _set(self.model)
    except AttributeError:
        raise NotImplementedError()

    try:
        _set(self.small)
    except AttributeError:
        pass
model_wrappers.py 文件源码 项目:importance-sampling 作者: idiap 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def set_lr(self, lr):
    """Set the learning rate of the wrapped models.

    The primary member ``model`` must accept the assignment; if any part
    of ``model.optimizer.lr`` is missing a NotImplementedError is raised.
    The auxiliary member ``small`` is updated opportunistically.
    """
    def _update(member):
        # Missing member / optimizer / lr surfaces as AttributeError.
        K.set_value(member.optimizer.lr, lr)

    try:
        _update(self.model)
    except AttributeError:
        raise NotImplementedError()

    try:
        _update(self.small)
    except AttributeError:
        pass
LSTM_Clfn_finetuned.py 文件源码 项目:Word2Vec 作者: hashbangCoder 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Early-stopping callback with optional learning-rate annealing.

    Tracks ``self.monitor`` in ``logs``. On improvement (per
    ``self.monitor_op``) the wait counter resets; otherwise, one epoch
    before patience runs out the learning rate is halved (when
    ``self.anneal`` is set), and once patience is exhausted training is
    stopped via ``self.model.stop_training``.
    """
    current = logs.get(self.monitor)
    if current is None:
        warnings.warn('Early stopping requires %s available!' % (self.monitor), RuntimeWarning)
        # Bug fix: without the monitored value there is nothing to
        # compare against — bail out instead of comparing None below.
        return

    if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
    else:
        if (self.wait == self.patience - 1) and self.anneal:
            # Parenthesized so the statement is valid on Python 2 and 3.
            print('Halving Learning Rate...')
            K.set_value(self.model.optimizer.lr, K.get_value(self.model.optimizer.lr)/2)
        elif self.wait >= self.patience:
            print('Epoch %d: early stopping' % (epoch))
            self.model.stop_training = True
        self.wait += 1
LSTM_Clfn_pretrained.py 文件源码 项目:Word2Vec 作者: hashbangCoder 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Early-stopping callback with optional learning-rate annealing.

    Compares ``logs[self.monitor]`` against the best value seen so far.
    On improvement the wait counter resets; otherwise the learning rate
    is halved one epoch before patience expires (if ``self.anneal``),
    and training stops once patience is exhausted.
    """
    current = logs.get(self.monitor)
    if current is None:
        warnings.warn('Early stopping requires %s available!' % (self.monitor), RuntimeWarning)
        # Bug fix: return early — the original fell through and
        # compared None against self.best.
        return

    if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
    else:
        if (self.wait == self.patience - 1) and self.anneal:
            # print() form works on both Python 2 and 3.
            print('Halving Learning Rate...')
            K.set_value(self.model.optimizer.lr, K.get_value(self.model.optimizer.lr)/2)
        elif self.wait >= self.patience:
            print('Epoch %d: early stopping' % (epoch))
            self.model.stop_training = True
        self.wait += 1
LSTM_Bidirectional.py 文件源码 项目:Word2Vec 作者: hashbangCoder 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Early-stopping callback with optional learning-rate annealing.

    Monitors ``logs[self.monitor]``; resets the patience counter on
    improvement, halves the learning rate one epoch before patience is
    exhausted (when ``self.anneal``), then stops training.
    """
    current = logs.get(self.monitor)
    if current is None:
        warnings.warn('Early stopping requires %s available!' % (self.monitor), RuntimeWarning)
        # Bug fix: abort this epoch's check — proceeding would compare
        # None with self.best.
        return

    if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
    else:
        if (self.wait == self.patience - 1) and self.anneal:
            # Parenthesized print runs under Python 2 and 3 alike.
            print('Halving Learning Rate...')
            K.set_value(self.model.optimizer.lr, K.get_value(self.model.optimizer.lr)/2)
        elif self.wait >= self.patience:
            print('Epoch %d: early stopping' % (epoch))
            self.model.stop_training = True
        self.wait += 1
cwrnn.py 文件源码 项目:LSTM-GRU-CNN-MLP 作者: ansleliu 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero the hidden state and rewind the clock of this stateful layer.

    Requires a fully specified input shape (batch size included).  The
    second state tracks the current time step; when running backwards it
    starts from the sequence length instead of zero.
    """
    assert self.stateful, 'Layer must be stateful.'
    input_shape = self.input_spec[0].shape
    if not input_shape[0]:
        raise Exception('If a RNN is stateful, a complete ' +
                        'input_shape must be provided (including batch size).')

    # Backward runs count down from the final time step.
    initial_time = self.input_spec[0].shape[1] if self.go_backwards else 0.

    if hasattr(self, 'states'):
        # Variables already exist: overwrite their values in place.
        K.set_value(self.states[0],
                    np.zeros((input_shape[0], self.output_dim)))
        K.set_value(self.states[1], initial_time)
    else:
        # First reset: allocate the state variables.
        self.states = [K.zeros((input_shape[0], self.output_dim)),
                       K.variable(initial_time)]
recurrent_convolutional.py 文件源码 项目:keras-prednet 作者: kunimasa-kawasaki 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Reset both states of this stateful convolutional-recurrent layer to zeros."""
    assert self.stateful, 'Layer must be stateful.'
    input_shape = self.input_shape
    if not input_shape[0]:
        raise Exception('If a RNN is stateful, a complete ' +
                        'input_shape must be provided ' +
                        '(including batch size).')

    # With return_sequences the output shape carries a leading time axis,
    # so the spatial/channel dims start one position later.
    offset = 2 if self.return_sequences else 1
    out_row, out_col, out_filter = self.output_shape[offset:]

    state_shape = (input_shape[0], out_row, out_col, out_filter)
    if hasattr(self, 'states'):
        # Existing variables: just overwrite their values.
        K.set_value(self.states[0], np.zeros(state_shape))
        K.set_value(self.states[1], np.zeros(state_shape))
    else:
        # First reset: allocate both state variables.
        self.states = [K.zeros(state_shape), K.zeros(state_shape)]
callbacks.py 文件源码 项目:mcv-m5 作者: david-vazquez 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def on_epoch_begin(self, epoch, logs=None):
    """Divide the learning rate by ``decay_rate`` on the scheduled epochs.

    When ``decay_epochs`` is None the decay applies every epoch.
    Raises ValueError when ``decay_rate`` is not a number or
    ``decay_epochs`` is not a list of numbers.
    """
    current_lr = float(K.get_value(self.model.optimizer.lr))
    try:
        # Computed before the schedule check so a non-numeric decay_rate
        # is always reported, even on epochs without a decay step.
        new_lr = current_lr / self.decay_rate
        scheduled = (self.decay_epochs is None) or ((epoch+1) in self.decay_epochs)
        if scheduled:
            K.set_value(self.model.optimizer.lr, new_lr)
            print('    \nLearning rate decayed by a factor of {}: {:.2E} --> {:.2E}\n'.format(
                self.decay_rate, current_lr, new_lr))
    except TypeError:
        raise ValueError('Decay rate for LRDecayScheduler must be a number.\n'
                         'Decay epochs for LRDecayScheduler must be a list of numbers.')
separable_RNN.py 文件源码 项目:New_Layers-Keras-Tensorflow 作者: WeidiXie 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero (or lazily allocate) the single hidden state of this stateful RNN."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise Exception('If a RNN is stateful, it needs to know '
                        'its batch size. Specify the batch size '
                        'of your input tensors: \n'
                        '- If using a Sequential model, '
                        'specify the batch size by passing '
                        'a `batch_input_shape` '
                        'argument to your first layer.\n'
                        '- If using the functional API, specify '
                        'the time dimension by passing a '
                        '`batch_shape` argument to your Input layer.')
    if hasattr(self, 'states'):
        # Variable already exists: overwrite its value with zeros.
        K.set_value(self.states[0],
                    np.zeros((batch_size, self.output_dim)))
    else:
        # First reset: allocate the state variable.
        self.states = [K.zeros((batch_size, self.output_dim))]
layer_normalization_RNN.py 文件源码 项目:New_Layers-Keras-Tensorflow 作者: WeidiXie 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Reset the layer's only hidden state to zeros, creating it on first use."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise Exception('If a RNN is stateful, it needs to know '
                        'its batch size. Specify the batch size '
                        'of your input tensors: \n'
                        '- If using a Sequential model, '
                        'specify the batch size by passing '
                        'a `batch_input_shape` '
                        'argument to your first layer.\n'
                        '- If using the functional API, specify '
                        'the time dimension by passing a '
                        '`batch_shape` argument to your Input layer.')
    state_shape = (batch_size, self.output_dim)
    if hasattr(self, 'states'):
        # Re-use the existing backend variable.
        K.set_value(self.states[0], np.zeros(state_shape))
    else:
        # Allocate on first reset.
        self.states = [K.zeros(state_shape)]
cwrnn.py 文件源码 项目:ikelos 作者: braingineer 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero the hidden state and rewind the time counter of the stateful layer.

    The second state holds the current time step; a backward pass starts
    it at the sequence length rather than at zero.
    """
    assert self.stateful, 'Layer must be stateful.'
    input_shape = self.input_spec[0].shape
    if not input_shape[0]:
        raise Exception('If a RNN is stateful, a complete ' +
                        'input_shape must be provided (including batch size).')

    # Start the clock at the end of the sequence when going backwards.
    initial_time = self.input_spec[0].shape[1] if self.go_backwards else 0.

    if hasattr(self, 'states'):
        K.set_value(self.states[0],
                    np.zeros((input_shape[0], self.output_dim)))
        K.set_value(self.states[1], initial_time)
    else:
        # First reset: allocate both backend variables.
        self.states = [K.zeros((input_shape[0], self.output_dim)),
                       K.variable(initial_time)]
rnnrbm.py 文件源码 项目:keras_bn_library 作者: bnsnapper 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero the three RNN-RBM states (recurrent-hidden, visible, hidden)."""
    assert self.stateful, 'Layer must be stateful.'
    input_shape = self.input_spec[0].shape

    if not input_shape[0]:
        raise Exception('If a RNN is stateful, a complete ' +
                        'input_shape must be provided (including batch size).')

    batch = input_shape[0]
    # One state per component, in this fixed order.
    dims = (self.hidden_recurrent_dim, self.input_dim, self.hidden_dim)
    if hasattr(self, 'states'):
        # Overwrite each existing variable with zeros.
        for i, dim in enumerate(dims):
            K.set_value(self.states[i], np.zeros((batch, dim)))
    else:
        # First reset: allocate all three variables.
        self.states = [K.zeros((batch, dim)) for dim in dims]
recurrent.py 文件源码 项目:keras_bn_library 作者: bnsnapper 项目源码 文件源码 阅读 31 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Reset both states (input-sized and output-sized) of this stateful layer."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, it needs to know '
                         'its batch size. Specify the batch size '
                         'of your input tensors: \n'
                         '- If using a Sequential model, '
                         'specify the batch size by passing '
                         'a `batch_input_shape` '
                         'argument to your first layer.\n'
                         '- If using the functional API, specify '
                         'the time dimension by passing a '
                         '`batch_shape` argument to your Input layer.')
    # First state is input-sized, second is output-sized.
    state_dims = (self.input_dim, self.output_dim)
    if hasattr(self, 'states'):
        for i, dim in enumerate(state_dims):
            K.set_value(self.states[i], np.zeros((batch_size, dim)))
    else:
        self.states = [K.zeros((batch_size, dim)) for dim in state_dims]
callbacks.py 文件源码 项目:segmentation_DLMI 作者: imatge-upc 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Divide the learning rate by ``decay_rate`` on the predefined epochs.

    A sentinel -1 in ``predefined_epochs`` applies the decay every epoch.
    """
    decay_now = (epoch in self.predefined_epochs) or (-1 in self.predefined_epochs)
    if decay_now:
        new_lr = K.get_value(self.model.optimizer.lr) / self.decay_rate
        K.set_value(self.model.optimizer.lr, new_lr)
callbacks.py 文件源码 项目:segmentation_DLMI 作者: imatge-upc 项目源码 文件源码 阅读 39 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Linearly anneal the learning rate towards zero after ``epoch_n``.

    The rate for the next epoch is ``lr_init`` scaled by the fraction of
    the window [epoch_n, num_epoch] that still lies ahead, floored at 0.
    """
    if epoch > self.epoch_n:
        # Fraction of the annealing window remaining (lr is set for the
        # upcoming epoch).
        remaining = 1.0 * (self.num_epoch - epoch)
        remaining = max(0, remaining / (self.num_epoch - self.epoch_n))
        new_lr = np.float32(self.lr_init * remaining)
        K.set_value(self.model.optimizer.lr, new_lr)
callbacks.py 文件源码 项目:segmentation_DLMI 作者: imatge-upc 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Linearly decay the learning rate by ``self.decay`` per epoch after ``epoch_n``.

    The scale factor never drops below zero, so the rate bottoms out at 0.
    """
    if epoch > self.epoch_n:
        scale = max(0, 1.0 - self.decay * (epoch - self.epoch_n))
        new_lr = np.float32(self.lr_init * scale)
        K.set_value(self.model.optimizer.lr, new_lr)
layers.py 文件源码 项目:recurrent-attention-for-QA-SQUAD-based-on-keras 作者: wentaozhu 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero the layer's single hidden state, allocating it on first reset."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, a complete '
                         'input_shape must be provided '
                         '(including batch size).')
    state_shape = (batch_size, self.output_dim)
    if hasattr(self, 'states'):
        K.set_value(self.states[0], np.zeros(state_shape))
    else:
        self.states = [K.zeros(state_shape)]
rnnlayer.py 文件源码 项目:recurrent-attention-for-QA-SQUAD-based-on-keras 作者: wentaozhu 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Reset the single hidden state (batch_size x units) to zeros."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, a complete '
                         'input_shape must be provided '
                         '(including batch size).')
    state_shape = (batch_size, self.units)
    if hasattr(self, 'states'):
        # Overwrite the existing backend variable in place.
        K.set_value(self.states[0], np.zeros(state_shape))
    else:
        # First reset: allocate the variable.
        self.states = [K.zeros(state_shape)]
rnnlayer.py 文件源码 项目:recurrent-attention-for-QA-SQUAD-based-on-keras 作者: wentaozhu 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Zero out the layer's hidden state; create it if this is the first reset."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, a complete '
                         'input_shape must be provided '
                         '(including batch size).')
    shape = (batch_size, self.units)
    if hasattr(self, 'states'):
        K.set_value(self.states[0], np.zeros(shape))
    else:
        self.states = [K.zeros(shape)]
rnnlayer.py 文件源码 项目:recurrent-attention-for-QA-SQUAD-based-on-keras 作者: wentaozhu 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Clear the stateful layer's hidden state back to zeros."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, a complete '
                         'input_shape must be provided '
                         '(including batch size).')
    zeros_shape = (batch_size, self.units)
    if hasattr(self, 'states'):
        K.set_value(self.states[0], np.zeros(zeros_shape))
    else:
        self.states = [K.zeros(zeros_shape)]
rnnlayer.py 文件源码 项目:recurrent-attention-for-QA-SQUAD-based-on-keras 作者: wentaozhu 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def reset_states(self):
    """Reset the layer's hidden state tensor to all zeros."""
    assert self.stateful, 'Layer must be stateful.'
    batch_size = self.input_spec[0].shape[0]
    if not batch_size:
        raise ValueError('If a RNN is stateful, a complete '
                         'input_shape must be provided '
                         '(including batch size).')
    state_shape = (batch_size, self.units)
    if hasattr(self, 'states'):
        # Keep the variable, replace its contents.
        K.set_value(self.states[0], np.zeros(state_shape))
    else:
        self.states = [K.zeros(state_shape)]
model.py 文件源码 项目:latplan 作者: guicho271828 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def cool(self, epoch, logs):
    """Anneal the temperature variable: exponential decay in the epoch,
    floored at ``self.min``."""
    annealed = self.max * np.exp(- self.anneal_rate * epoch)
    K.set_value(self.tau, np.maximum(self.min, annealed))
model.py 文件源码 项目:latplan 作者: guicho271828 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def _load(self):
    """Load as the parent does, then restore the stored constant ``c``
    into its backend variable."""
    super()._load()
    c_value = self.parameters['c']
    K.set_value(self.c, c_value)
model.py 文件源码 项目:latplan 作者: guicho271828 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def train(self,train_data,
          batch_size=1000,
          save=True,
          train_data_to=None,
          test_data=None,
          test_data_to=None,
          **kwargs):
    """Train the network, then estimate and store the PU constant ``c``.

    ``c`` is the mean predicted score over the positively-labelled
    validation examples. Saving is deferred until ``c`` has been set so
    a saved model always carries it.

    Raises:
        Exception: when the validation set contains no positive examples.
    """
    super().train(train_data,
                  batch_size=batch_size,
                  train_data_to=train_data_to,
                  test_data=test_data,
                  test_data_to=test_data_to,
                  save=False,
                  **kwargs)

    positive_mask = (test_data_to == 1)
    # Fail fast: the original ran predict on the (empty) positive slice
    # before discovering there were no positives.
    if np.count_nonzero(positive_mask) == 0:
        raise Exception("there are no positive data in the validation set; Training failed.")

    s = self.net.predict(test_data[positive_mask],batch_size=batch_size)
    c = s.mean()
    print("PU constant c =", c)
    K.set_value(self.c, c)
    self.parameters['c'] = float(c)
    # prevent saving before setting c
    if save:
        self.save()
helper.py 文件源码 项目:Kutils 作者: ishank26 项目源码 文件源码 阅读 37 收藏 0 点赞 0 评论 0
def on_epoch_begin(self, epoch, logs={}):
    """Multiply the learning rate by ``self.decay`` every ``self.n_epoch``
    epochs (for epochs > 1); otherwise leave it untouched.

    NOTE(review): the original's else-branch re-assigned the unchanged
    value, a no-op; it is removed here.
    """
    if epoch > 1 and epoch % self.n_epoch == 0:
        # Theano-style shared variable: read current value, write decayed one.
        old_lr = self.model.optimizer.lr.get_value()
        new_lr = self.decay * old_lr
        k.set_value(self.model.optimizer.lr, new_lr)


# keras integrated
helper.py 文件源码 项目:Kutils 作者: ishank26 项目源码 文件源码 阅读 40 收藏 0 点赞 0 评论 0
def on_epoch_end(self, epoch, logs={}):
    """Scale the learning rate by exp(loss) after each epoch.

    NOTE(review): the original fetched the loss as ``logs.items()[1][1]``,
    which depends on dict ordering and fails outright on Python 3; the
    loss is now looked up by key.
    """
    loss = logs.get('loss')  # get loss
    print("loss: ", loss)
    old_lr = self.model.optimizer.lr.get_value()  # get old lr
    new_lr = old_lr * np.exp(loss)  # lr*exp(loss)
    k.set_value(self.model.optimizer.lr, new_lr)


# decaylr=LearningRateScheduler(decay_sch)


# checkpoint=ModelCheckpoint("weights/adam_noep{0}_batch{1}_seq_{2}.hdf5".format(\
# no_epochs,batch, seq_length), monitor='loss', verbose=0,
# save_best_only=True, save_weights_only=False, mode='min')
callbacks.py 文件源码 项目:keras 作者: GeekLiB 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def on_epoch_begin(self, epoch, logs={}):
    """Query the user-supplied schedule for this epoch's learning rate
    and install it on the optimizer.

    Raises ValueError when the schedule returns a non-float.
    """
    assert hasattr(self.model.optimizer, 'lr'), \
        'Optimizer must have a "lr" attribute.'
    new_lr = self.schedule(epoch)

    # The schedule must yield a plain or NumPy float.
    is_float = isinstance(new_lr, (float, np.float32, np.float64))
    if not is_float:
        raise ValueError('The output of the "schedule" function '
                         'should be float.')

    K.set_value(self.model.optimizer.lr, new_lr)


问题


面经


文章

微信
公众号

扫码关注公众号