def scrap(self):
    """
    Scrape the target URL page.

    Fetches the list of article-detail URLs, builds an info dict for
    each article via ``get_article_detail_info_dict``, and saves the
    collected dicts to CSV via
    ``save_article_detail_info_list_to_csv``.

    Articles whose parsing raises ``AttributeError`` (e.g. a missing
    element in the page structure) are skipped: the error and its
    traceback are printed and scraping continues with the next URL.
    """
    # Get list of detail-article URLs to visit.
    article_detail_url_list = self.get_article_detail_urls()
    article_detail_info = []
    for article_url in article_detail_url_list:
        try:
            article_dict = self.get_article_detail_info_dict(article_url)
        except AttributeError as err:
            # Page layout didn't match the parser — report and skip
            # this article rather than aborting the whole run.
            print("[ Exception ] Exception occurred in GScraper#scrap: {}".format(err))
            traceback.print_tb(err.__traceback__)
        else:
            article_detail_info.append(article_dict)
    self.save_article_detail_info_list_to_csv(article_detail_info)
# NOTE(review): stray page text left over from a copy-paste of a scraped
# page — "评论列表" (comment list) / "文章目录" (article table of contents).
# Not code; kept here as a comment so the file remains valid Python.