import csv
import io
import logging
import tempfile
import urllib.request

logger = logging.getLogger(__name__)


def fetch_and_parse_from_url(self, url, parser, force=False, **options):
    # Download the CSV into a temporary file first, then parse it from
    # disk so the HTTP response can be closed promptly.
    with urllib.request.urlopen(url) as response:
        with tempfile.TemporaryFile() as csvfile:
            csvfile.write(response.read())
            csvfile.seek(0)
            # TemporaryFile is opened in binary mode; TextIOWrapper
            # decodes it so csv.DictReader can consume text.
            reader = csv.DictReader(io.TextIOWrapper(csvfile, encoding='utf-8'))
            total = 0
            imported = 0
            import_errors = 0
            for row in reader:
                total += 1
                row = self.format_csv_fields(row)
                logger.debug(row)
                exists = parser.exists_in_db(row)
                if exists and not force:
                    # Skip rows already in the database unless a
                    # forced re-import was requested.
                    continue
                id = row.get(parser.key)
                try:
                    data = parser.parse(row)
                    logger.debug(data)
                    model, created = parser.commit(data)
                except Exception as err:
                    # Record the failure and move on to the next row
                    # instead of aborting the whole import.
                    import_errors += 1
                    logger.error('%s "%s" could not be parsed: parse_errors=%s row=%s',
                                 parser.name, id, err, row)
                    logger.exception(err)
                    continue
                imported += 1
                if created:
                    logger.info('Created %s "%s"', parser.name, id)
                else:
                    logger.info('Updated %s "%s"', parser.name, id)
    logger.info('Import %s data complete: total=%s imported=%s errors=%s',
                parser.name, total, imported, import_errors)
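
The function only assumes a small parser contract: a name used in log messages, a key naming the CSV column that uniquely identifies a row, exists_in_db(row), parse(row), and commit(data) returning a (model, created) pair. A minimal sketch of such a parser follows; the CityParser class, the cities dict standing in for a real database, and the field names are hypothetical, only the attribute and method names come from fetch_and_parse_from_url above.

cities = {}  # hypothetical stand-in for the real database table


class CityParser:
    name = 'city'      # label used in log messages
    key = 'city_id'    # CSV column holding each row's unique id

    def exists_in_db(self, row):
        return row.get(self.key) in cities

    def parse(self, row):
        # Convert raw CSV strings into typed, validated values;
        # raising here is what the caller counts as an import error.
        return {'city_id': row['city_id'], 'population': int(row['population'])}

    def commit(self, data):
        # Persist the record and report whether it was created or updated,
        # mirroring the (model, created) pair of Django's update_or_create().
        created = data['city_id'] not in cities
        cities[data['city_id']] = data
        return data, created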