def write(table_name,
          table_schema_path,
          connection_string,
          input_file,
          db_schema,
          geometry_support,
          from_srid,
          skip_headers,
          indexes_fields):
    """Load rows from a CSV file into a database table.

    Dispatches on the connection string: Carto connection strings are
    loaded via ``carto.load``; anything else goes through a storage
    adaptor built by ``create_storage_adaptor`` (with a raw psycopg2
    COPY fast path when no geometry handling is required).

    Parameters
    ----------
    table_name : str
        Destination table name.
    table_schema_path : str or None
        Path/URL of the JSON table schema descriptor.
    connection_string : str
        Database or Carto connection string.
    input_file : str
        Path or S3 URL of the input CSV file.
    db_schema : str
        Database schema to write into.
    geometry_support : str or None
        Geometry mode (e.g. ``'postgis'``); ``None`` disables it.
    from_srid : int or None
        Source SRID forwarded to the storage adaptor.
    skip_headers : bool
        When true, discard the first CSV row (header line).
    indexes_fields : str or None
        Comma-separated field names to index (Carto branch only).
    """
    table_schema = get_table_schema(table_schema_path)

    ## TODO: csv settings? use Frictionless Data csv standard?
    ## TODO: support line delimited json?
    with fopen(input_file) as file:
        # S3 objects are opened as binary streams; decode them lazily.
        if re.match(s3_regex, input_file) is not None:
            rows = csv.reader(codecs.iterdecode(file, 'utf-8'))
        else:
            rows = csv.reader(file)

        if skip_headers:
            next(rows)

        if re.match(carto.carto_connection_string_regex, connection_string) is not None:
            load_postgis = geometry_support == 'postgis'

            if indexes_fields is not None:
                indexes_fields = indexes_fields.split(',')

            carto.load(db_schema, table_name, load_postgis,
                       table_schema, connection_string, rows,
                       indexes_fields)
        else:
            connection_string = get_connection_string(connection_string)

            engine, storage = create_storage_adaptor(
                connection_string, db_schema, geometry_support,
                from_srid=from_srid)

            ## TODO: truncate? carto does. Makes this idempotent
            # Schema was already loaded above — reuse it rather than
            # re-reading the descriptor a second time.
            if table_schema_path is not None:
                storage.describe(table_name, descriptor=table_schema)

            # Fast path: stream rows with COPY when no geometry handling
            # is needed and the driver supports it.
            if geometry_support is None and engine.dialect.driver == 'psycopg2':
                copy_from(engine, table_name, table_schema, rows)
            else:
                storage.write(table_name, rows)
# NOTE(review): stray non-code text from the page this file was scraped
# from; commented out so the module stays importable.
# 评论列表
# 文章目录