def test_time(self):
"""Make sure the time array looks ok. Evenly spaced, bounds are correct."""
numeric_times = self.output.variables["time"][:]
self.assertAlmostEqual(np.mean(np.diff(numeric_times)), 0.1, delta=0.01)
self.assertAlmostEqual(np.min(np.diff(numeric_times)), 0.1, delta=0.01)
self.assertAlmostEqual(np.max(np.diff(numeric_times)), 0.1, delta=0.01)
datetimes = nc.num2date(numeric_times, self.output.variables["time"].units)
self.assertLess(abs((datetimes[0]-self.start_time).total_seconds()), 0.1)
self.assertLess(abs((datetimes[-1]-self.end_time).total_seconds()), 0.1)
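# The examples on this page all revolve around netCDF4.num2date(). As a quick
# orientation, here is a minimal round-trip sketch with its inverse,
# date2num(); the units string and dates are made up for illustration.
import netCDF4 as nc
from datetime import datetime

units = 'seconds since 1970-01-01 00:00:00'
dates = [datetime(2017, 3, 1, 12, 0, 0), datetime(2017, 3, 1, 12, 0, 10)]
nums = nc.date2num(dates, units)   # datetimes -> numeric offsets
back = nc.num2date(nums, units)    # numeric offsets -> datetime-like objects
print(nums, back)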
def finalize(self, nc_out):
    # TODO: when primary is implemented, make sure to use primary min and max
    # actually, do raise exceptions here; handle them higher up
    udim = next((d for d in self.config.dims.values() if d["max"] is not None), None)
    if udim is None:
        # bail early: no unlimited dimension is configured
        return ""
    udim_max = udim["max"]  # renamed from ``max`` to avoid shadowing the builtin
    if isinstance(udim_max, datetime):
        return datetime_format(udim_max)
    else:
        udim_indexed_by = udim["index_by"]
        dt = nc.num2date(udim_max, self.config.vars[udim_indexed_by]["attributes"]["units"])
        return datetime_format(dt)
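# For context, a hypothetical shape of self.config that finalize() appears to
# assume, inferred purely from the attribute access above; the real structure
# is defined elsewhere in this project.
class _HypotheticalConfig:
    dims = {
        'time': {'max': 1488369600.0, 'index_by': 'time'},   # numeric max value
    }
    vars = {
        'time': {'attributes': {'units': 'seconds since 1970-01-01 00:00:00'}},
    }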
def read_icl_na(ifile, apply_flag=False):
"""Reads in the ICL data from the Aerodyne Tunable IR Laser Direct
Absorption Spectrometer (TILDAS) model QC-TILDAS-DUAL
:param ifile: nasaAmes input filename
:type ifile:
:param apply_flag:
:type apply_flag: boolean
:return: pandas.DataFrame"""
    try:
        import nappy
    except ImportError:
        sys.stdout.write('Cannot import nappy ...\n')
        return
ds = nappy.openNAFile(ifile)
ds.readData()
timestamp = netCDF4.num2date(ds.X, ds.getIndependentVariable(0)[1])
    from collections import OrderedDict
    data = OrderedDict()  # renamed from ``dict`` to avoid shadowing the builtin
    data['timestamp'] = timestamp
    for i, v in enumerate(['c2h6_conc', 'c2h6_flag']):
        data[v] = ds.V[i]
    df = pd.DataFrame(data)
df = df.set_index('timestamp')
    if apply_flag:
        # use .loc to avoid pandas chained-assignment pitfalls
        df.loc[df['c2h6_flag'] != 0, 'c2h6_conc'] = np.nan
return df
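# Hypothetical usage of read_icl_na(); the NASA Ames filename is a placeholder.
df = read_icl_na('b999_tildas_20170301.na', apply_flag=True)
if df is not None:
    print(df['c2h6_conc'].describe())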
def read_ncfile3D(filename, varname, make_cuts=False):
"""
Reads a 3D field along the time for variable "varname", i.e. results a 4D numpy array.
If "make_cuts" is not False, it should be a list of pairs of tuples representing cuts to be made and returned.
Example:
make_cuts = ((lat1, lon1),(lat2, lon2))
:param filename:
:param varname:
:param make_cut:
:return:
"""
try:
file = nc.Dataset(filename, 'r')
    except IOError as e:
        logging.error("I/O error(%s): %s\n\tFile: %s", e.errno, e.strerror, filename)
        return None
    except RuntimeError as e:
        logging.error("%s\n\tRequested file: %s", str(e), filename)
pdb.set_trace()
return None
try:
f_data_pointer = file.variables[varname]
time = file.variables['Time']
lat = file.variables['lat'][:, 0]
lon = file.variables['lon'][0, :]
try:
c_Units = f_data_pointer.units
except AttributeError:
c_Units = None
if make_cuts:
f_data, lat, lon, elev = get_planes(f_data_pointer, make_cuts, lat, lon, file.variables['height'])
else:
# These two lines are a lot of data!
elev = file.variables['height'][:]
f_data = f_data_pointer[:]
try:
f_data = np.where(f_data == f_data_pointer._FillValue, IncF.f_FillValue, f_data)
except AttributeError:
logging.warning('No FillValue')
except KeyError:
        logging.warning('Variable %s is not in %s, so a fill-value matrix will be created', varname, filename)
f_data = np.empty(shape=file.variables['height'].shape)
f_data.fill(IncF.f_FillValue)
c_Units = None
try:
d_time = nc.num2date(time[:], units=time.units, calendar=time.calendar)
except AttributeError:
c_time = [''.join(str(t) for t in TT) for TT in time[:]]
d_time = [dt.datetime.strptime(c_T[:], '%Y-%m-%d_%H:%M:%S') for c_T in c_time]
    # np.asarray so the timedelta shift also works when d_time is a plain list
    d_TimeArray = np.asarray(d_time) + dt.timedelta(hours=IncF.i_TimeZone)
file.close()
return f_data, lat, lon, elev, d_TimeArray, c_Units
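# Hypothetical call to read_ncfile3D(); filename and variable name are
# placeholders, and IncF must already be configured as in the module above.
result = read_ncfile3D('model_output.nc', 'O3')
if result is not None:
    f_data, lat, lon, elev, times, units = result
    print(f_data.shape, units, times[0])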
def __init__(self,
data=(datetime.now(),),
filename=None,
varname=None,
tz_offset=None,
origin=None,
displacement=timedelta(seconds=0),
**kwargs):
    '''
    Representation of a time axis. Provides interpolation alphas and indexing.

    :param data: ascending list of times to use
    :param tz_offset: offset to compensate for time zone shifts
    :param origin: shifts the time interval to begin at the time specified
    :param displacement: displacement to apply to the time data; allows shifting
        the entire time interval into the future or past
    :type data: netCDF4.Variable or [] of datetime.datetime
    :type tz_offset: datetime.timedelta
    :type origin: datetime.datetime
    :type displacement: datetime.timedelta
    '''
if isinstance(data, (nc4.Variable, nc4._netCDF4._Variable)):
self.data = nc4.num2date(data[:], units=data.units)
elif data is None:
self.data = np.array([datetime.now()])
else:
self.data = np.asarray(data)
if origin is not None:
diff = self.data[0] - origin
self.data -= diff
self.data += displacement
self.filename = filename
self.varname = varname
# if self.filename is None:
# self.filename = self.id + '_time.txt'
if tz_offset is not None:
self.data += tz_offset
if not self._timeseries_is_ascending(self.data):
raise ValueError("Time sequence is not ascending")
if self._has_duplicates(self.data):
raise ValueError("Time sequence has duplicate entries")
self.name = data.name if hasattr(data, 'name') else None
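# A minimal sketch of constructing the axis from plain datetimes; the class
# name Time is an assumption, since only __init__ is shown here.
from datetime import datetime, timedelta

hourly = [datetime(2017, 1, 1) + timedelta(hours=h) for h in range(24)]
axis = Time(data=hourly, tz_offset=timedelta(hours=-5))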
def load_data(file, varname, extent=None, period=None, **kwargs):
"""
Loads netCDF files and extracts data given a spatial extend and time period
of interest.
"""
    # Open either a single- or multi-file dataset, depending on whether a
    # wildcard pattern or a list of files was given
if "*" in file or isinstance(file, list):
ds = xr.open_mfdataset(file, decode_times=False)
else:
ds = xr.open_dataset(file, decode_times=False)
# Construct condition based on spatial extents
if extent:
n, e, s, w = extent
ds = ds.sel(lat=(ds.lat >= s) & (ds.lat <= n))
# Account for extent crossing Greenwich
if w > e:
ds = ds.sel(lon=(ds.lon >= w) | (ds.lon <= e))
else:
ds = ds.sel(lon=(ds.lon >= w) & (ds.lon <= e))
    # Construct condition based on time period
if period:
t1 = date2num(datetime(*period[0]), ds.time.units, ds.time.calendar)
t2 = date2num(datetime(*period[1]), ds.time.units, ds.time.calendar)
ds = ds.sel(time=(ds.time >= t1) & (ds.time <= t2))
# Extra keyword arguments to select from additional dimensions (e.g. plev)
if kwargs:
ds = ds.sel(**kwargs)
# Load in the data to a numpy array
    dates = num2date(ds.time.values, ds.time.units, ds.time.calendar)
arr = ds[varname].values
lat = ds.lat.values
lon = ds.lon.values
# Convert pr units to mm/day
if ds[varname].units == 'kg m-2 s-1':
arr *= 86400
    # Convert tas units from K to degC
elif ds[varname].units == 'K':
arr -= 273.15
return arr, lat, lon, dates
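# Hypothetical usage of load_data(): monthly precipitation over Europe for
# 1981-2000; the file pattern is a placeholder.
arr, lat, lon, dates = load_data(
    'pr_Amon_*.nc', 'pr',
    extent=(72, 45, 30, -15),              # n, e, s, w
    period=((1981, 1, 1), (2000, 12, 31)))
print(arr.shape, dates[0], dates[-1])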
def extract(core_netcdf):
"""
Extracts all CO and O3 data from a FAAM core netCDF.
"""
ncfilename = os.path.join(core_netcdf.path, core_netcdf.filename)
ds = FAAM_Dataset(ncfilename)
_ds_index = ds.index.ravel()
units = 'seconds since %s 00:00:00 +0000' % str(_ds_index[0])[:10]
timestamp = netCDF4.num2date(ds.variables['Time'][:].ravel(), units)
n = timestamp.size
if 'CO_AERO' in ds.variables.keys():
co_aero = ds.variables['CO_AERO'][:]
co_aero_flag = ds.variables['CO_AERO_FLAG'][:]
co_aero[co_aero_flag != 0] = -9999.0
else:
co_aero = np.zeros(n)-9999.0
if 'O3_TECO' in ds.variables.keys():
o3_teco = ds.variables['O3_TECO'][:]
o3_teco_flag = ds.variables['O3_TECO_FLAG'][:]
o3_teco[o3_teco_flag != 0] = -9999.0
else:
o3_teco = np.zeros(n)-9999.0
# Old FAAM files didn't have the GIN instrument fitted
if 'LAT_GIN' in ds.variables.keys():
lon_var_name = 'LON_GIN'
lat_var_name = 'LAT_GIN'
alt_var_name = 'ALT_GIN'
    elif 'LAT_GPS' in ds.variables.keys():
        lon_var_name = 'LON_GPS'
        lat_var_name = 'LAT_GPS'
        alt_var_name = 'GPS_ALT'
    else:
        # avoid a NameError further down if neither GIN nor GPS data exist
        raise KeyError('No GIN or GPS position variables found in %s' % ncfilename)
if len(ds.variables[lon_var_name][:].shape) > 1:
x = ds.variables[lon_var_name][:, 0].ravel()
y = ds.variables[lat_var_name][:, 0].ravel()
z = ds.variables[alt_var_name][:, 0].ravel()
else:
x = ds.variables[lon_var_name][:].ravel()
y = ds.variables[lat_var_name][:].ravel()
z = ds.variables[alt_var_name][:].ravel()
wow = ds.variables['WOW_IND'][:].ravel()
timestamp_string = [t.strftime('%Y-%m-%dT%H:%M:%S') for t in timestamp]
fid = [core_netcdf.fid,]*n
result = zip(list(np.array(timestamp_string)[wow == 0]),
list(np.array(fid)[wow == 0]),
list(x[wow == 0]),
list(y[wow == 0]),
list(z[wow == 0]),
list(co_aero[wow == 0]),
list(o3_teco[wow == 0]))
return result
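# Hypothetical driver for extract(); CoreNetCDF stands in for whatever record
# type the surrounding pipeline passes in (only path/filename/fid are needed).
from collections import namedtuple

CoreNetCDF = namedtuple('CoreNetCDF', ['path', 'filename', 'fid'])
core = CoreNetCDF('/data/faam', 'core_faam_20170301_r0_b999.nc', 'b999')
for row in list(extract(core))[:5]:
    print(row)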
# open the output file and write the column labels out