import os
import tempfile
from os.path import join

import numpy.ma as ma
import pytest
from netCDF4 import Dataset

import nwm_subset

# Output paths for the combined NetCDF files. These are assumed module-level
# definitions; the original test module declares them elsewhere.
_ids_in_order_nc = join(tempfile.gettempdir(), 'ids_in_order.nc')
_ids_not_in_order_nc = join(tempfile.gettempdir(), 'ids_not_in_order.nc')


@pytest.fixture
def file_to_combine_setup(request):
    """Write small per-timestep NetCDF files, combine them, and clean up."""
    file_pattern = 'combine_me_comids_{0}consistent{1}.nc'
    tempdir = tempfile.gettempdir()
    consistent_id_order = [join(tempdir, file_pattern.format('', i))
                           for i in range(3)]
    inconsistent_id_order = [join(tempdir, file_pattern.format('in', i))
                             for i in range(3)]
    ids = [2, 4, 6, 8]
    flows_template = [3.1, 2.2, 5.0, 7.1]
    date_template = '2017-04-29_0{0}:00:00'

    # Files whose feature_id variables are all in the same order.
    for i, nc_file in enumerate(consistent_id_order):
        date = date_template.format(i)
        flows = [flow * (i + 1) for flow in flows_template]
        if i == 1:
            flows[1] = -9999.0  # one way of masking data: use the fill value
        elif i == 2:
            flows = ma.masked_array(flows, mask=[0, 1, 0, 0])  # explicit mask
        with Dataset(nc_file, 'w') as nc:
            nc.model_output_valid_time = date
            nc.createDimension('feature_id', 4)
            id_var = nc.createVariable('feature_id', 'i', ('feature_id',))
            id_var[:] = ids
            flow_var = nc.createVariable('streamflow', 'f', ('feature_id',),
                                         fill_value=-9999.0)
            flow_var[:] = flows
    nwm_subset.combine_files(consistent_id_order, _ids_in_order_nc)

    # Files where one timestep lists its feature_ids in reverse order.
    for i, nc_file in enumerate(inconsistent_id_order):
        date = date_template.format(i)
        flows = [flow * (i + 1) for flow in flows_template]
        if i == 1:
            comids = ids[::-1]
            flows = flows[::-1]
        else:
            comids = ids
        with Dataset(nc_file, 'w') as nc:
            nc.model_output_valid_time = date
            nc.createDimension('feature_id', 4)
            id_var = nc.createVariable('feature_id', 'i', ('feature_id',))
            id_var[:] = comids
            flow_var = nc.createVariable('streamflow', 'f', ('feature_id',),
                                         fill_value=-9999.0)
            flow_var[:] = flows
    nwm_subset.combine_files(inconsistent_id_order, _ids_not_in_order_nc,
                             river_ids=[2], consistent_id_order=False)

    # The per-timestep input files are no longer needed once combined.
    delete_me = consistent_id_order + inconsistent_id_order
    for filename in delete_me:
        os.remove(filename)

    def file_to_combine_teardown():
        os.remove(_ids_in_order_nc)
        os.remove(_ids_not_in_order_nc)

    request.addfinalizer(file_to_combine_teardown)
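
For context, a test that consumes this fixture might look like the sketch below. It assumes the combined file produced by nwm_subset.combine_files keeps a 'feature_id' variable matching the per-timestep inputs; the test name and assertion are illustrative, not the suite's actual checks.

# Minimal usage sketch (assumed test, not from the original suite).
def test_combine_keeps_feature_ids(file_to_combine_setup):
    with Dataset(_ids_in_order_nc) as nc:
        assert list(nc.variables['feature_id'][:]) == [2, 4, 6, 8]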