def testNGramsBadSizes(self):
  """Verify mappers.ngrams raises ValueError for invalid ngram_range bounds."""
  string_tensor = tf.constant(['abc', 'def', 'fghijklm', 'z', ''])
  tokenized_tensor = tf.string_split(string_tensor, delimiter='')
  # Lower bound of 0 is invalid: n-gram sizes must start at 1 or greater.
  # assertRaisesRegex replaces the deprecated assertRaisesRegexp alias
  # (removed in Python 3.12).
  with self.assertRaisesRegex(ValueError, 'Invalid ngram_range'):
    mappers.ngrams(tokenized_tensor, (0, 5), separator='')
  # Lower bound greater than upper bound is invalid.
  with self.assertRaisesRegex(ValueError, 'Invalid ngram_range'):
    mappers.ngrams(tokenized_tensor, (6, 5), separator='')