def one_hot_encoding(labels, num_classes, scope=None):
"""Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
scope: Optional scope for op_scope.
Returns:
one hot encoding of the labels.
"""
with tf.op_scope([labels], scope, 'OneHotEncoding'):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat(1, [indices, labels])
onehot_labels = tf.sparse_to_dense(
concated, tf.pack([batch_size, num_classes]), 1.0, 0.0)
onehot_labels.set_shape([batch_size, num_classes])
return onehot_labels
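# The snippet above targets the pre-1.0 TensorFlow API (tf.op_scope,
# tf.pack, axis-first tf.concat). A minimal sketch of the same
# transformation using the built-in tf.one_hot, assuming a TF 1.x build:
import tensorflow as tf

labels = tf.constant([0, 2, 1])        # [batch_size] integer labels
onehot = tf.one_hot(labels, depth=4)   # [batch_size, num_classes], float32
with tf.Session() as sess:
    print(sess.run(onehot))
    # [[1. 0. 0. 0.]
    #  [0. 0. 1. 0.]
    #  [0. 1. 0. 0.]]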
Python sparse_to_dense() example source code
def _decode_lambda(self, args):
"""
Decoding within tensorflow graph.
In case kenlm_directory is specified, a modified version of tensorflow
(available at https://github.com/timediv/tensorflow-with-kenlm)
is needed to run that extends ctc_decode to use a kenlm decoder.
:return:
Most probable decoded sequence. Important: blank labels are returned as `-1`.
"""
import tensorflow as tf
prediction_batch, prediction_lengths = args
log_prediction_batch = tf.log(tf.transpose(prediction_batch, perm=[1, 0, 2]) + 1e-8)
prediction_length_batch = tf.to_int32(tf.squeeze(prediction_lengths, axis=[1]))
(decoded, log_prob) = self.ctc_get_decoded_and_log_probability_batch(log_prediction_batch,
prediction_length_batch)
return single([tf.sparse_to_dense(st.indices, st.dense_shape, st.values, default_value=-1) for st in decoded])
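# A sketch of the densification step with the stock tf.nn.ctc_greedy_decoder
# (rather than the kenlm-patched build referenced above); the shapes and the
# 29-label alphabet are illustrative assumptions:
import tensorflow as tf

log_probs = tf.placeholder(tf.float32, [None, None, 29])  # [time, batch, labels]
lengths = tf.placeholder(tf.int32, [None])                # [batch]
decoded, _ = tf.nn.ctc_greedy_decoder(log_probs, lengths)
st = decoded[0]  # a tf.SparseTensor of ragged label sequences
dense = tf.sparse_to_dense(st.indices, st.dense_shape, st.values,
                           default_value=-1)  # pad blanks with -1, as above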
def kSparse(self, x, topk):
print('run regular k-sparse')
dim = int(x.get_shape()[1])
if topk > dim:
warnings.warn('Warning: topk should not be larger than dim: %s, found: %s, using %s' % (dim, topk, dim))
topk = dim
k = dim - topk
values, indices = tf.nn.top_k(-x, k) # indices will be [[0, 1], [2, 1]], values will be [[6., 2.], [5., 4.]]
# We need to create full indices like [[0, 0], [0, 1], [1, 2], [1, 1]]
my_range = tf.expand_dims(tf.range(0, tf.shape(indices)[0]), 1) # will be [[0], [1]]
my_range_repeated = tf.tile(my_range, [1, k]) # will be [[0, 0], [1, 1]]
full_indices = tf.stack([my_range_repeated, indices], axis=2) # change shapes to [N, k, 1] and [N, k, 1], to concatenate into [N, k, 2]
full_indices = tf.reshape(full_indices, [-1, 2])
to_reset = tf.sparse_to_dense(full_indices, tf.shape(x), tf.reshape(values, [-1]), default_value=0., validate_indices=False)
res = tf.add(x, to_reset)
return res
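# A NumPy trace of the masking trick on a hypothetical row (topk=2):
# tf.nn.top_k(-x, k) locates the k = dim - topk smallest activations, and
# adding their negated values back via sparse_to_dense zeroes them out.
import numpy as np

x = np.array([[6., 2., 5., 4.]])
k = x.shape[1] - 2                       # k = 2 entries to reset
drop = np.argsort(x, axis=1)[:, :k]      # indices of the k smallest entries
out = x.copy()
np.put_along_axis(out, drop, 0., axis=1)
print(out)                               # [[6. 0. 5. 0.]]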
def labels_to_onehots(labels, num_classes):
"""Convert a vector of integer class labels to a matrix of one-hot target vectors.
:param labels: a vector of integer labels, 0 to num_classes. Has shape (batch_size,).
:param num_classes: the total number of classes
:return: has shape (batch_size, num_classes)
"""
batch_size = labels.get_shape().as_list()[0]
with tf.name_scope("one_hot"):
labels = tf.expand_dims(labels, 1)
indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
sparse_ptrs = tf.concat(1, [indices, labels], name="ptrs")
onehots = tf.sparse_to_dense(sparse_ptrs, [batch_size, num_classes],
1.0, 0.0)
return onehots
def one_hot_encoding(labels, num_classes, scope=None):
"""Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
scope: Optional scope for name_scope.
Returns:
one hot encoding of the labels.
"""
with tf.name_scope(scope, 'OneHotEncoding', [labels]):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat([indices, labels], 1)
onehot_labels = tf.sparse_to_dense(
concated, tf.stack([batch_size, num_classes]), 1.0, 0.0)
onehot_labels.set_shape([batch_size, num_classes])
return onehot_labels
def loss_func_softmax(pred, gold):
"""softmax function with integers as the second argument (instead of zero-one encoding matrix)
Args:
pred: log-odds where the last dimension is the number of labels
gold: integer array the same size as pred but the last dimension which is 1
Returns:
the softmax values applied to the predictions
"""
pred = tf.reshape(pred, [-1, pred.get_shape()[-1].value])
gold = tf.reshape(gold, [pred.get_shape()[0].value])
n = pred.get_shape()[0].value
voc_size = pred.get_shape()[1].value
rg = tf.range(0, n)
inds = tf.transpose(tf.pack([rg, tf.cast(gold, 'int32')]))
vals = tf.ones([n])
# gold_mat = tf.SparseTensor( , [n, voc_size])
gold_mat = tf.sparse_to_dense(inds, [n, voc_size], vals)
return tf.nn.softmax_cross_entropy_with_logits(pred, gold_mat)
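# The same loss without materializing the dense one-hot matrix, using
# tf.nn.sparse_softmax_cross_entropy_with_logits (TF 1.x keyword form);
# a sketch, not the original author's code:
import tensorflow as tf

def loss_func_softmax_sparse(pred, gold):
    pred = tf.reshape(pred, [-1, pred.get_shape()[-1].value])
    gold = tf.reshape(tf.cast(gold, tf.int32), [-1])
    return tf.nn.sparse_softmax_cross_entropy_with_logits(labels=gold,
                                                          logits=pred)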
def one_hot(labels, num_classes, name='one_hot'):
"""Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
name: Optional name for op_scope.
Returns:
one hot encoding of the labels.
"""
with tf.op_scope([labels], name):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat(1, [indices, labels])
onehot_labels = tf.sparse_to_dense(
concated, tf.pack([batch_size, num_classes]), 1.0, 0.0)
onehot_labels.set_shape([batch_size, num_classes])
return onehot_labels
def one_hot_encoding(labels, num_classes, scope=None):
"""Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
scope: Optional scope for name_scope.
Returns:
one hot encoding of the labels.
"""
with tf.name_scope(scope, 'OneHotEncoding', [labels]):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat([indices, labels], 1)
onehot_labels = tf.sparse_to_dense(
concated, tf.stack([batch_size, num_classes]), 1.0, 0.0)
onehot_labels.set_shape([batch_size, num_classes])
return onehot_labels
def backward(self):
dx_flat = self.probs
coords = tf.transpose(tf.pack([tf.range(self.N * self.T), self.y_flat]))
binary_mask = tf.sparse_to_dense(coords, dx_flat.get_shape(), 1)
# convert 1/0 to True/False
binary_mask = tf.cast(binary_mask, tf.bool)
decremented = dx_flat - 1
# make new x out of old values or decremented ones, depending on mask
dx_flat = tf.select(binary_mask, decremented, dx_flat)
dx_flat /= self.N
dx_flat *= self.mask_flat[:, None]
dx = tf.reshape(dx_flat, [self.N, self.T, self.V])
return dx
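# Sanity check of the identity the mask implements: the gradient of
# softmax cross-entropy w.r.t. the logits is (probs - one_hot(y)) / N.
import numpy as np

probs = np.array([[0.7, 0.2, 0.1],
                  [0.1, 0.6, 0.3]])
y = np.array([0, 2])
N = probs.shape[0]
dx = probs.copy()
dx[np.arange(N), y] -= 1.0   # the tf.select(binary_mask, ...) step above
dx /= N
print(dx)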
def one_hot_encoding(labels, num_classes, scope=None):
"""Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
scope: Optional scope for name_scope.
Returns:
one hot encoding of the labels.
"""
with tf.name_scope(scope, 'OneHotEncoding', [labels]):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat(axis=1, values=[indices, labels])
onehot_labels = tf.sparse_to_dense(
concated, tf.stack([batch_size, num_classes]), 1.0, 0.0)
onehot_labels.set_shape([batch_size, num_classes])
return onehot_labels
def shift_thin_stack(thin_stack, thin_stack_head_next, batch_size,
max_num_concepts, decoder_position,
prev_transition_state):
"""Applies shift to the thin stack and its head if in shift state."""
# Head points to item after stack top, so always update the stack entry.
new_thin_stack = write_thin_stack(thin_stack, thin_stack_head_next,
decoder_position, batch_size, max_num_concepts)
# Push if previous transition state is shift (or pointer shift).
stack_head_updates = tf.sparse_to_dense(tf.pack(
[data_utils.GEN_STATE]),
tf.pack([data_utils.NUM_TR_STATES]), 1)
new_thin_stack_head_next = tf.add(thin_stack_head_next,
tf.gather(stack_head_updates, prev_transition_state))
return new_thin_stack, new_thin_stack_head_next
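# What the sparse_to_dense lookup table above does, traced in NumPy with
# hypothetical values standing in for the data_utils constants:
import numpy as np

GEN_STATE, NUM_TR_STATES = 1, 4            # illustrative assumptions
stack_head_updates = np.zeros(NUM_TR_STATES, dtype=np.int32)
stack_head_updates[GEN_STATE] = 1          # 1 at the shift state, else 0
prev_transition_state = np.array([1, 0, 1])
thin_stack_head_next = np.array([2, 5, 0])
# Heads advance only for batch entries whose previous state was GEN_STATE.
print(thin_stack_head_next + stack_head_updates[prev_transition_state])
# [3 5 1]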
def gather_nd_states(inputs, inds, batch_size, input_size, state_size):
"""Gathers an embedding for each batch entry with index inds from inputs.
Args:
inputs: Tensor [batch_size, input_size, state_size].
inds: Tensor [batch_size]
Returns:
output: Tensor [batch_size, state_size]
"""
sparse_inds = tf.transpose(tf.pack(
[tf.range(batch_size), inds]))
dense_inds = tf.sparse_to_dense(sparse_inds,
tf.pack([batch_size, input_size]),
tf.ones(tf.pack([batch_size])))
output_sum = tf.reduce_sum(tf.reshape(dense_inds,
[-1, input_size, 1, 1]) * tf.reshape(inputs,
[-1, input_size, 1, state_size]), [1, 2])
output = tf.reshape(output_sum, [-1, state_size])
return output
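# The dense-mask-and-reduce_sum above is equivalent to a direct
# tf.gather_nd (TF 1.x); a sketch, not the original code:
import tensorflow as tf

def gather_nd_states_v2(inputs, inds, batch_size):
    # inputs: [batch_size, input_size, state_size], inds: [batch_size] int32
    nd_inds = tf.stack([tf.range(batch_size), inds], axis=1)
    return tf.gather_nd(inputs, nd_inds)   # [batch_size, state_size]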
def prepare_reader(self, filename_queue):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
contexts, features = tf.parse_single_sequence_example(
serialized_example,
context_features={
"video_id": tf.FixedLenFeature([], tf.string),
"labels": tf.VarLenFeature(tf.int64)},
sequence_features={
"rgb": tf.FixedLenSequenceFeature([], dtype=tf.string),
"audio": tf.FixedLenSequenceFeature([], dtype=tf.string),
})
# read ground truth labels
labels = (tf.cast(
tf.sparse_to_dense(contexts["labels"].values, (self.num_classes,), 1,
validate_indices=False),
tf.bool))
rgbs, num_frames = self.get_video_matrix(features["rgb"], 1024, self.max_frames)
audios, num_frames = self.get_video_matrix(features["audio"], 1024, self.max_frames)
batch_video_ids = tf.expand_dims(contexts["video_id"], 0)
batch_rgbs = tf.expand_dims(rgbs, 0)
batch_audios = tf.expand_dims(audios, 0)
batch_labels = tf.expand_dims(labels, 0)
batch_frames = tf.expand_dims(num_frames, 0)
return batch_video_ids, batch_rgbs, batch_audios, batch_labels, batch_frames
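# What the labels densification above computes, traced in NumPy with
# hypothetical values (num_classes and the label ids are illustrative):
import numpy as np

num_classes = 6
label_ids = np.array([1, 4])            # contexts["labels"].values
multi_hot = np.zeros(num_classes, dtype=bool)
multi_hot[label_ids] = True
print(multi_hot)                        # -> [False True False False True False]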
def get_config(self):
config = {'topk': self.topk, 'ctype': self.ctype}
base_config = super(KCompetitive, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
# def k_comp_sigm(self, x, topk):
# print 'run k_comp_sigm'
# dim = int(x.get_shape()[1])
# if topk > dim:
# warnings.warn('topk should not be larger than dim: %s, found: %s, using %s' % (dim, topk, dim))
# topk = dim
# values, indices = tf.nn.top_k(x, topk) # indices will be [[0, 1], [2, 1]], values will be [[6., 2.], [5., 4.]]
# # We need to create full indices like [[0, 0], [0, 1], [1, 2], [1, 1]]
# my_range = tf.expand_dims(tf.range(0, K.shape(indices)[0]), 1) # will be [[0], [1]]
# my_range_repeated = tf.tile(my_range, [1, topk]) # will be [[0, 0], [1, 1]]
# full_indices = tf.stack([my_range_repeated, indices], axis=2) # change shapes to [N, k, 1] and [N, k, 1], to concatenate into [N, k, 2]
# full_indices = tf.reshape(full_indices, [-1, 2])
# to_reset = tf.sparse_to_dense(full_indices, tf.shape(x), tf.reshape(values, [-1]), default_value=0., validate_indices=False)
# batch_size = tf.to_float(tf.shape(x)[0])
# tmp = 1 * batch_size * tf.reduce_sum(x - to_reset, 1, keep_dims=True) / topk
# res = tf.sparse_to_dense(full_indices, tf.shape(x), tf.reshape(tf.add(values, tmp), [-1]), default_value=0., validate_indices=False)
# return res