def fit(self, x, y, batch_size=32, epochs=1, verbose=1, callbacks=None,
        validation_split=0.0, validation_data=None, steps_per_epoch=None):
    """Create an `InMemoryDataset` instance with the given data and train
    the model using importance sampling for a given number of epochs.

    Arguments
    ---------
        x: Numpy array of training data or list of numpy arrays
        y: Numpy array of target data
        batch_size: int, number of samples per gradient update
        epochs: int, number of times to iterate over the entire
            training set
        verbose: {0, >0}, whether to employ the progress bar Keras
            callback or not
        callbacks: list of Keras callbacks to be called during training
        validation_split: float in [0, 1), percentage of data to use for
            evaluation
        validation_data: tuple of numpy arrays, data to evaluate the
            trained model on without ever training on them
        steps_per_epoch: int or None, number of gradient updates before
            considering an epoch has passed

    Returns
    -------
    A Keras `History` object that contains information collected during
    training.
    """
    # Create two data tuples from the given x, y, validation_*
    if validation_data is not None:
        x_train, y_train = x, y
        x_test, y_test = validation_data
    elif validation_split > 0:
        assert validation_split < 1, "100% of the data used for testing"
        n = int(round(validation_split * len(x)))
        idxs = np.arange(len(x))
        np.random.shuffle(idxs)
        x_train, y_train = x[idxs[n:]], y[idxs[n:]]
        x_test, y_test = x[idxs[:n]], y[idxs[:n]]
    else:
        x_train, y_train = x, y
        x_test, y_test = np.empty(shape=(0, 1)), np.empty(shape=(0, 1))

    # Make the dataset to train on
    dataset = InMemoryDataset(
        x_train,
        y_train,
        x_test,
        y_test,
        categorical=False  # this means use the targets as is
    )

    return self.fit_dataset(
        dataset=dataset,
        batch_size=batch_size,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        verbose=verbose,
        callbacks=callbacks
    )
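For context, a minimal usage sketch follows. It assumes this `fit` method belongs to the library's model wrapper class (here called `ImportanceTraining`, imported from `importance_sampling.training`); the wrapper name, import path, and the toy model/data are assumptions for illustration, not part of the snippet above.

# Minimal usage sketch (wrapper name and import path assumed).
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from importance_sampling.training import ImportanceTraining  # assumed import path

# Toy binary-classification data: 1024 samples, 20 features.
x = np.random.rand(1024, 20).astype("float32")
y = (x.sum(axis=1) > 10).astype("float32").reshape(-1, 1)

model = Sequential([
    Dense(32, activation="relu", input_shape=(20,)),
    Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])

# 10% of the samples are split off for evaluation (validation_split branch
# above); the rest are trained on with importance sampling.
history = ImportanceTraining(model).fit(
    x, y,
    batch_size=32,
    epochs=5,
    validation_split=0.1,
)
print(history.history["loss"][-1])

Passing `validation_data=(x_val, y_val)` instead would take the first branch of the split logic and use that tuple for evaluation without ever training on it.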