import json
import os
import shutil
import subprocess
import time

def crawl():
    count = 0
    make_dir('./log')  # ensure the log directory exists
    while True:
        count += 1
        # Run the Scrapy spider as a subprocess, capturing exit status and output
        status, res = subprocess.getstatusoutput('scrapy crawl news')
        if status == 0:
            print(res)
        else:
            print('crawl failed {}'.format(res))
        # Pick up every result file (named res_*) the spider left in the working directory
        for file in os.listdir(os.getcwd()):
            if os.path.isfile(file) and 'res_' in file:
                with open(file, 'r') as fobj:
                    try:
                        res = json.load(fobj)
                    except Exception as e:
                        print(e)
                        res = None
                if res:
                    # Archive the raw file, write its records to the database, then remove it
                    shutil.copy(file, './log/{}'.format(file))
                    insert_value(res)
                    print(res)
                    os.remove(file)
        print('loop {} finished'.format(count))
        time.sleep(60 * 30)  # wait 30 minutes before the next crawl
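The helpers make_dir and insert_value are defined elsewhere in the project and are not shown here. A minimal sketch of what they might look like, assuming the parsed JSON is a list of dicts and the records go into a SQLite table — the database file, table name, and column names below are placeholders, not the project's actual schema:

import os
import sqlite3

def make_dir(path):
    # Create the directory if it does not already exist
    os.makedirs(path, exist_ok=True)

def insert_value(records):
    # Hypothetical sink: 'news.db' and the (title, url) columns are
    # assumptions for illustration; the real schema lives elsewhere.
    conn = sqlite3.connect('news.db')
    with conn:
        conn.execute('CREATE TABLE IF NOT EXISTS news (title TEXT, url TEXT)')
        conn.executemany(
            'INSERT INTO news (title, url) VALUES (?, ?)',
            [(r.get('title'), r.get('url')) for r in records]
        )
    conn.close()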