def test_search(self):
    """Search across two mocked Twitter API pages: results are fetched,
    cached under a query-specific key, and every item matches the query.
    """
    with open('tests/fixtures/twitter.json', 'r') as feed_file:
        page1 = json.load(feed_file)
    with open('tests/fixtures/twitter.2.json', 'r') as feed_file:
        page2 = json.load(feed_file)
    # First request carries no ``max_id`` parameter -> serve page 1;
    # the paginated follow-up request includes ``max_id`` -> serve page 2.
    # Raw strings avoid invalid-escape warnings for ``\d``.
    responses.add(responses.GET,
                  re.compile(r'(?!.*max_id=\d*)https?://api.twitter.com.*'),
                  json=page1, status=200)
    responses.add(responses.GET,
                  re.compile(r'(?=.*max_id=\d*)https?://api.twitter.com.*'),
                  json=page2, status=200)
    q = "release"
    cache_key = "{}:q-{}".format(self.cache_key, q)
    # The cache must start cold so we know get_items() populated it.
    self.assertIsNone(cache.get(cache_key))
    # Ensure we set the SEARCH_MAX_HISTORY big enough for both twitter
    # pages to be included (oldest post on page 2 plus a small margin).
    now = datetime.datetime.now(tzutc())
    last_post_date = TwitterFeedItem.get_post_date(page2[-1])
    delta = (now - last_post_date) + datetime.timedelta(seconds=10)
    with override_settings(WAGTAIL_SOCIALFEED_SEARCH_MAX_HISTORY=delta):
        stream = self.stream.get_items(config=self.feedconfig,
                                       query_string=q)
    self.assertIsNotNone(cache.get(cache_key))
    self.assertEqual(len(stream), 2)
    for s in stream:
        self.assertIn('release', s.text)