def append_times_to_dataset(times, dataset, time_units):
    """Append the given list of times to the dataset's 'time' variable.

    Assumes the time units in the NetCDF4 dataset correspond to the
    string *time_units*. Accepts either timedelta or datetime objects.
    """
    start = dataset.dimensions['time'].size
    end = start + len(times)
    if isinstance(times[0], timedelta):
        # timedelta inputs: express as seconds, then convert to the
        # dataset's time units before writing.
        seconds = np.array([t.total_seconds() for t in times])
        converted = from_unit_to_another(seconds, 'seconds', time_units)
        dataset.variables['time'][start:end] = converted[:]
    else:
        # Otherwise assume datetime objects and let netCDF4 encode them.
        dataset.variables['time'][start:end] = nc4.date2num(
            times, dataset.variables['time'].units,
            calendar='proleptic_gregorian'
        )
# Collected example uses of netCDF4 date2num() (snippets from various projects)
def test_time_from_variable():
    """Should read the date back from a NetCDF 'time' variable."""
    tempdir = tempfile.gettempdir()
    nc_file = os.path.join(tempdir, 'test_time_from_variable.nc')
    date_obj = parser.parse('2017-04-29 04:00:00')
    units = 'minutes since 1970-01-01 00:00:00 UTC'
    nc_date = round(date2num(date_obj, units))
    try:
        # Write a one-element time variable encoded with the given units.
        with Dataset(nc_file, 'w') as nc:
            nc.createDimension('time', 1)
            time_var = nc.createVariable('time', 'i', ('time',))
            time_var[:] = [nc_date]
            time_var.units = units
        # Read it back and compare against the original (UTC-aware) datetime.
        with Dataset(nc_file, 'r') as nc:
            expected = date_obj.replace(tzinfo=pytz.utc)
            returned = nwm_data.time_from_dataset(nc)
            assert expected == returned
    finally:
        # Clean up even when the assertion fails, so reruns start fresh
        # (the original only removed the file on the success path).
        if os.path.exists(nc_file):
            os.remove(nc_file)
def create_netcdf(savef, dts, dat):
    """Write Florida Current transport data to a NetCDF file.

    Parameters: savef - output file path; dts - sequence of datetimes;
    dat - transport values in Sverdrups, one per date.
    """
    dataset = Dataset(savef, 'w', format='NETCDF4_CLASSIC')
    # Create unlimited time coordinate
    tdim = dataset.createDimension('time', None)
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(dts, time.units, calendar=time.calendar)
    # Create data variable
    fc = dataset.createVariable('florida_current_transport', np.float64,
                                (tdim.name,), fill_value=1.e20)
    fc.units = 'Sv'
    fc[:] = dat
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % savef)
    dataset.close()
def write_to_netcdf(self, ncfile):
    """Write observation data (dates and Florida Current transport) to
    a NetCDF file at *ncfile*."""
    # Open ncfile and create coords
    dataset = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    tdim = dataset.createDimension('time', None)
    # Create time coordinate
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(self.dates, time.units, calendar=time.calendar)
    # Create transport variable (Sverdrups)
    fc = dataset.createVariable('florida_current_transport', np.float64,
                                (tdim.name,))
    fc.units = 'Sv'
    fc[:] = self.fc
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % ncfile)
    dataset.close()
def write_to_netcdf(self, ncfile):
    """Write observation data (dates and Florida Current transport) to
    a NetCDF file at *ncfile*."""
    # Open ncfile and create coords
    dataset = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    tdim = dataset.createDimension('time', None)
    # Create time coordinate
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(self.dates, time.units, calendar=time.calendar)
    # Create transport variable (Sverdrups)
    fc = dataset.createVariable('florida_current_transport', np.float64,
                                (tdim.name,))
    fc.units = 'Sv'
    fc[:] = self.fc
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % ncfile)
    dataset.close()
def _iter (self):
    from fstd2nc.mixins import _var_type
    from datetime import datetime
    import numpy as np
    from netCDF4 import date2num
    # Parse the optional reference date that anchors the time units.
    if self._reference_date is None:
        reference_date = None
    else:
        reference_date = datetime.strptime(self._reference_date, '%Y-%m-%d')
    varlist = super(netCDF_IO, self)._iter()
    if self._unique_names:
        # Name fixing needs the full list up front.
        varlist = list(varlist)
        self._fix_names(varlist)
    for var in varlist:
        # Rewrite time axes from datetime objects to relative units.
        # Short-circuit order matters: only _var_type instances are
        # guaranteed to carry an .array attribute.
        needs_conversion = (var.name in var.axes
                            and isinstance(var, _var_type)
                            and isinstance(var.array[0], np.datetime64))
        if needs_conversion:
            # np.datetime64 -> datetime.datetime via tolist().
            var.array = var.array.tolist()
            units = '%s since %s' % (self._time_units,
                                     reference_date or var.array[0])
            var.atts.update(units=units)
            var.array = np.asarray(date2num(var.array, units=units))
        yield var
def write_to_netcdf(self, ncfile):
    """Write observation data (dates, depths, overturning streamfunction)
    to a NetCDF file at *ncfile*."""
    # Open ncfile and create coords
    dataset = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    zdim = dataset.createDimension('depth', self.z.size)
    tdim = dataset.createDimension('time', None)
    # Create time coordinate
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(self.dates, time.units, calendar=time.calendar)
    # Create depth coordinate
    z = dataset.createVariable('depth', np.float64, (zdim.name,))
    z.units = 'm'
    z[:] = self.z
    # Create streamfunction variable (time x depth, Sverdrups)
    sf = dataset.createVariable('stream_function_mar', np.float64,
                                (tdim.name, zdim.name))
    sf.units = 'Sv'
    sf[:] = self.sf
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % ncfile)
    dataset.close()
def write_to_netcdf(self, ncfile):
    """Write observation data (Ekman, upper mid-ocean, Gulf Stream and
    MOC transports) to a NetCDF file at *ncfile*."""
    # Open ncfile and create coords
    dataset = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    tdim = dataset.createDimension('time', None)
    # Create time coordinate
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(self.dates, time.units, calendar=time.calendar)
    # Create transport variables (all in Sverdrups)
    ek = dataset.createVariable('t_ek10', np.float64, (tdim.name,))
    ek.units = 'Sv'
    ek[:] = self.ekman
    umo = dataset.createVariable('t_umo10', np.float64, (tdim.name,))
    umo.units = 'Sv'
    umo[:] = self.umo
    fc = dataset.createVariable('t_gs10', np.float64, (tdim.name,))
    fc.units = 'Sv'
    fc[:] = self.fc
    moc = dataset.createVariable('t_moc_mar_hc10', np.float64, (tdim.name,))
    moc.units = 'Sv'
    moc[:] = self.moc
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % ncfile)
    dataset.close()
def write_to_netcdf(self, ncfile):
    """Write observation data (dates, depths, overturning streamfunction)
    to a NetCDF file at *ncfile*."""
    # Open ncfile and create coords
    dataset = Dataset(ncfile, 'w', format='NETCDF4_CLASSIC')
    zdim = dataset.createDimension('depth', self.z.size)
    tdim = dataset.createDimension('time', None)
    # Create time coordinate
    time = dataset.createVariable('time', np.float64, (tdim.name,))
    time.units = 'hours since 0001-01-01 00:00:00.0'
    time.calendar = 'gregorian'
    time[:] = date2num(self.dates, time.units, calendar=time.calendar)
    # Create depth coordinate
    z = dataset.createVariable('depth', np.float64, (zdim.name,))
    z.units = 'm'
    z[:] = self.z
    # Create streamfunction variable (time x depth, Sverdrups)
    sf = dataset.createVariable('stream_function_mar', np.float64,
                                (tdim.name, zdim.name))
    sf.units = 'Sv'
    sf[:] = self.sf
    # print() call works under both Python 2 and 3 (original used the
    # Python-2-only print statement)
    print('SAVING: %s' % ncfile)
    dataset.close()
def load_data(file, varname, extent=None, period=None, **kwargs):
    """
    Loads netCDF files and extracts *varname* given a spatial extent and time
    period of interest.

    Parameters:
        file: path, wildcard pattern, or list of paths.
        varname: name of the variable to extract.
        extent: optional (north, east, south, west) bounding box in degrees.
        period: optional pair of date tuples, e.g. ((y1, m1, 1), (y2, m2, 1)).
        **kwargs: extra selectors for additional dimensions (e.g. plev).

    Returns:
        (arr, lat, lon, dates): data values, latitudes, longitudes, and
        decoded datetime objects.
    """
    # Open either single or multi-file data set depending if list of wildcard
    if "*" in file or isinstance(file, list):
        ds = xr.open_mfdataset(file, decode_times=False)
    else:
        ds = xr.open_dataset(file, decode_times=False)
    # Construct condition based on spatial extents
    # NOTE(review): .sel() with boolean arrays is xarray-version-sensitive;
    # confirm this works with the pinned xarray version.
    if extent:
        n, e, s, w = extent
        ds = ds.sel(lat=(ds.lat >= s) & (ds.lat <= n))
        # Account for extent crossing Greenwich
        if w > e:
            ds = ds.sel(lon=(ds.lon >= w) | (ds.lon <= e))
        else:
            ds = ds.sel(lon=(ds.lon >= w) & (ds.lon <= e))
    # Construct condition base on time period; times stay numeric because
    # the datasets were opened with decode_times=False
    if period:
        t1 = date2num(datetime(*period[0]), ds.time.units, ds.time.calendar)
        t2 = date2num(datetime(*period[1]), ds.time.units, ds.time.calendar)
        ds = ds.sel(time=(ds.time >= t1) & (ds.time <= t2))
    # Extra keyword arguments to select from additional dimensions (e.g. plev)
    if kwargs:
        ds = ds.sel(**kwargs)
    # Load in the data to a numpy array
    dates = num2date(ds.time, ds.time.units, ds.time.calendar)
    arr = ds[varname].values
    lat = ds.lat.values
    lon = ds.lon.values
    # Convert pr units from kg m-2 s-1 to mm/day
    if ds[varname].units == 'kg m-2 s-1':
        arr *= 86400
    # Convert tas units from K to degrees Celsius
    # NOTE(review): the original comment said "degK", but subtracting
    # 273.15 converts kelvin to Celsius.
    elif ds[varname].units == 'K':
        arr -= 273.15
    return arr, lat, lon, dates