def __loadTicksFromMongo(self,host,port,dbName,symbolName,startDatetimeStr,endDatetimeStr):
    """mid
    Load tick data for one symbol from MongoDB inside a datetime window.

    Returns two parallel lists: matplotlib date numbers and last prices.
    """
    client = MongoClient(host=host, port=port)
    collection = client[dbName][symbolName]
    fmt = '%Y-%m-%d %H:%M:%S'
    window = {'$gte': dt.datetime.strptime(startDatetimeStr, fmt),
              '$lte': dt.datetime.strptime(endDatetimeStr, fmt)}
    tickDatetimeNums = []
    tickPrices = []
    for doc in collection.find({'datetime': window}):
        tickDatetimeNums.append(mpd.date2num(doc['datetime']))
        tickPrices.append(doc['lastPrice'])
    return tickDatetimeNums, tickPrices
#----------------------------------------------------------------------
# Example source snippets using matplotlib's date2num()
def handler_all_price_csv(txtPath: str, pictureRoot: str):
    """Read an all-prices CSV and render random 120-bar candlestick windows.

    Up to 11000 samples are generated: each picks a random stock and a
    random end index, then plots the preceding 120 rows via ``kplot`` into
    ``pictureRoot``, using the picture file name as the label.

    :param txtPath: path of the input CSV/txt file
    :param pictureRoot: directory the generated pictures are written to
    """
    # makedirs creates intermediate directories and tolerates an existing
    # target (os.mkdir would raise in both situations).
    os.makedirs(pictureRoot, exist_ok=True)
    DATA = pd.read_csv(txtPath, low_memory=False)
    # Convert date strings (YYYY/M/D) to matplotlib date numbers; anything
    # not matching the pattern is passed through unchanged.
    DATA['??'] = DATA['??'].map(lambda x: mdates.date2num(datetime.datetime.strptime(x, '%Y/%m/%d')) if re.match(r"[0-9]{4}/[0-9]{1,2}/[0-9]{1,2}", str(x)) else x)
    # Unique stock identifiers present in the file.
    stocknames = list(set(DATA['??'].tolist()))
    n = 0
    while n < 11000:
        stockname = random.choice(stocknames)
        data = DATA[DATA['??'] == stockname]  # rows of the chosen stock
        length = len(data)
        # Need at least 360 rows (120 plotted + history margin); retry with
        # another random stock.  NOTE(review): if no stock ever has >=360
        # rows this loop never terminates -- confirm inputs guarantee it.
        if length < 360:
            continue
        idx = random.randint(360, length - 1)
        df = data.iloc[idx - 120:idx]  # the 120 rows ending just before idx
        kplot(df, os.path.join(pictureRoot, "%s-%s" % (stockname, str(idx).zfill(6))))
        n += 1
def extract_subset(self, start_date, end_date):
    """Function for extracting shorter time series.

    Provide starting and ending dates; returns another ChannelList object
    holding the subset of samples whose time falls inside [start, end].

    The selection is a single enumerate pass: the previous version used
    list.index(), which maps duplicate timestamps to the first occurrence
    (dropping later samples) and costs O(n^2).
    """
    templist = ChannelList()
    t0 = date2num(start_date)
    t1 = date2num(end_date)
    # enumerate yields indexes already in ascending order.
    indexes = [i for i, t in enumerate(self.t) if t0 <= date2num(t) <= t1]
    templist.line = [self.line[ind] for ind in indexes]
    templist.station = [self.station[ind] for ind in indexes]
    templist.alt = [self.alt[ind] for ind in indexes]
    templist.grav = [self.grav[ind] for ind in indexes]
    templist.sd = [self.sd[ind] for ind in indexes]
    templist.tiltx = [self.tiltx[ind] for ind in indexes]
    templist.tilty = [self.tilty[ind] for ind in indexes]
    templist.temp = [self.temp[ind] for ind in indexes]
    templist.etc = [self.etc[ind] for ind in indexes]
    templist.dur = [self.dur[ind] for ind in indexes]
    templist.rej = [self.rej[ind] for ind in indexes]
    templist.t = [self.t[ind] for ind in indexes]
    templist.keepdata = [self.keepdata[ind] for ind in indexes]
    return templist
def timevect(d_StartDate, d_EndDate, c_TimeFreq, DT=None):
    """Build parallel lists of matplotlib date numbers and datetimes.

    Steps from d_StartDate to d_EndDate (inclusive).  For 'Monthly' the
    step is the length of the current month; for 'Yearly' it is the exact
    distance to the same date next year; otherwise the caller-supplied DT
    timedelta is used.

    :return: (f_Time, d_Time) -- float date numbers and datetime objects
    """
    f_Time = []
    d_Time = []
    while d_StartDate <= d_EndDate:
        d_Time.append(d_StartDate)
        f_Time.append(date2num(d_StartDate))
        if c_TimeFreq == 'Monthly':
            # Length of the current month (no num2date roundtrip needed).
            DT = dt.timedelta(days=monthrange(d_StartDate.year, d_StartDate.month)[1])
        elif c_TimeFreq == 'Yearly':
            # Exact distance to the same date next year: 366 days when
            # Feb 29 lies inside the interval.  The previous
            # isleap(year + 1) test was wrong for Jan/Feb start dates.
            try:
                d_Next = d_StartDate.replace(year=d_StartDate.year + 1)
            except ValueError:  # started on Feb 29
                d_Next = d_StartDate.replace(year=d_StartDate.year + 1, day=28)
            DT = d_Next - d_StartDate
        d_StartDate += DT
    return f_Time, d_Time
def load_symbol(self):
    """Load quotes for the symbol typed in the UI and hand them to the plot widget.

    Reads start/end dates and the symbol from the line edits, fetches the
    data via _load_raw_yahoo_data, converts the index into matplotlib date
    numbers and triggers a redraw.  Does nothing when the symbol is empty.
    """
    start = parser.parse(str(self.ui_controller.dateStartEdit.text()))
    end = parser.parse(str(self.ui_controller.dateEndEdit.text()))
    symbol = str(self.ui_controller.symbolLineEdit.text())
    if not symbol: return
    data = _load_raw_yahoo_data(stocks=[symbol], indexes={},
                                start=start, end=end)
    self.df = data[symbol]
    # Normalise column names so the 'adj close' lookup below works.
    self.df.columns = [col.lower() for col in self.df.columns]
    self.df['datetime'] = self.df.index
    # Matplotlib wants float date numbers on the x axis.
    self.df['datetime'] = self.df.apply(
        lambda row: mdates.date2num(row['datetime']),
        axis=1)
    if 'adj close' in self.df.columns:
        # Prefer the adjusted close when the source provides one.
        self.df['close'] = self.df['adj close']
    self.ui_controller.matplotlibWidget.set_data(self.df)
    self.ui_controller.matplotlibWidget.draw_data()
    # Clear the input so the next symbol can be typed.
    self.ui_controller.symbolLineEdit.setText('')
def _candlestick_ax(df, ax):
    """Draw df's OHLC rows as candlesticks on ax (red up / green down).

    The 'datetime' column is converted to matplotlib date numbers first.
    NOTE(review): .astype(dt.date) relies on pandas object casting;
    presumably the column holds datetime-likes -- confirm, since
    Series.dt.date would be the conventional spelling.
    """
    quotes = df.reset_index()
    quotes.loc[:, 'datetime'] = mdates.date2num(quotes.loc[:, 'datetime'].astype(dt.date))
    fplt.candlestick_ohlc(ax, quotes.values, width=0.4, colorup='red', colordown='green')
def add_cal_coefficients_to_axes(ax, table):
    """
    Plots calibration coefficients on the CO plot.

    Each table row is (start, end, *coefficient strings).  Rows whose
    [start, end] interval lies inside the axes' current x-limits get their
    coefficient text drawn centred between start and end at 80% of ymax.
    """
    _xlim = ax.get_xlim()
    _ylim = ax.get_ylim()
    fmt = '%Y-%m-%d %H:%M:%S'
    for line in table:
        # Parse each timestamp once instead of re-parsing per comparison.
        t_start = date2num(datetime.datetime.strptime(line[0], fmt))
        t_end = date2num(datetime.datetime.strptime(line[1], fmt))
        if (t_start > _xlim[0]) & (t_end < _xlim[1]):
            x = (t_start + t_end) / 2.0
            y = 0.8 * _ylim[1]
            # http://stackoverflow.com/questions/17086847/box-around-text-in-matplotlib
            ax.text(x, y, '\n'.join([i.strip() for i in line[2:]]),
                    horizontalalignment='center', verticalalignment='top',
                    fontsize='small', color='black',
                    bbox=dict(facecolor='wheat', edgecolor='black',
                              boxstyle='round,pad=0.6'))
def plot_day_summary_oclh(ax, quotes, ticksize=3, colorup='r', colordown='g', ):
    """Plot day summaries with columns ordered (time, open, close, high, low).

    Each quote is drawn as a vertical line from low to high, with a left
    tick at the open price and a right tick at the close price.

    Parameters
    ----------
    ax : `Axes`
        the axes to draw on
    quotes : sequence of (time, open, close, high, low, ...) sequences
        time must be a float date number - see date2num
    ticksize : int
        size of the open/close tick markers, in points
    colorup : color
        line colour when close >= open
    colordown : color
        line colour when close < open

    Returns
    -------
    lines : list
        one tuple of added lines per quote
    """
    # Delegate to the shared implementation in open-close-low-high mode.
    return _plot_day_summary(ax, quotes,
                             ticksize=ticksize,
                             colorup=colorup,
                             colordown=colordown,
                             ochl=True)
def plot_day_summary_ohlc(ax, quotes, ticksize=3, colorup='r', colordown='g', ):
    """Plot day summaries with columns ordered (time, open, high, low, close).

    Each quote is drawn as a vertical line from low to high, with a left
    tick at the open price and a right tick at the close price.

    Parameters
    ----------
    ax : `Axes`
        the axes to draw on
    quotes : sequence of (time, open, high, low, close, ...) sequences
        time must be a float date number - see date2num
    ticksize : int
        size of the open/close tick markers, in points
    colorup : color
        line colour when close >= open
    colordown : color
        line colour when close < open

    Returns
    -------
    lines : list
        one tuple of added lines per quote
    """
    # Delegate to the shared implementation in open-high-low-close mode.
    return _plot_day_summary(ax, quotes,
                             ticksize=ticksize,
                             colorup=colorup,
                             colordown=colordown,
                             ochl=False)
def candlestick_ochl(ax, quotes, width=0.2, colorup='r', colordown='g', alpha=1.0):
    """Draw candlesticks for quotes ordered (time, open, close, high, low).

    A vertical line spans low to high; a rectangle spans the open-close
    range, coloured with `colorup` when close >= open and `colordown`
    otherwise.

    Parameters
    ----------
    ax : `Axes`
        the axes to draw on
    quotes : sequence of (time, open, close, high, low, ...) sequences
        Only the first 5 elements are used; extra trailing values (e.g.
        volume) are ignored.  time must be in float days - see date2num.
    width : float
        rectangle width as a fraction of a day
    colorup : color
        rectangle colour when close >= open
    colordown : color
        rectangle colour when close < open
    alpha : float
        alpha level applied to the rectangles

    Returns
    -------
    ret : tuple
        (lines, patches): the added line objects and rectangle patches
    """
    # Delegate to the shared implementation in open-close-high-low mode.
    return _candlestick(ax, quotes,
                        width=width,
                        colorup=colorup,
                        colordown=colordown,
                        alpha=alpha,
                        ochl=True)
def __getTickDatetimeByXPosition(self,xAxis):
    """mid
    Map an x-axis position to a tick datetime.

    If xAxis looks like a matplotlib date number after 1900-01-01, it is
    converted to a UTC datetime; otherwise the raw value is returned
    unchanged.
    """
    # Positions at or before 1900-01-01 are not treated as date numbers.
    minYearDatetimeNum = mpd.date2num(dt.datetime(1900, 1, 1))
    if xAxis <= minYearDatetimeNum:
        return xAxis
    tickDatetime = mpd.num2date(xAxis).astimezone(pytz.timezone('utc'))
    if tickDatetime.year >= 1900:
        return tickDatetime
    return xAxis
#----------------------------------------------------------------------
def getTickDatetimeByXPosition(self,xAxis):
    """mid
    Map an x-axis position to a tick datetime.

    Same rule as __getTickDatetimeByXPosition: values after the
    1900-01-01 date number become UTC datetimes, everything else is
    returned as-is.
    """
    minYearDatetimeNum = mpd.date2num(dt.datetime(1900, 1, 1))
    if xAxis <= minYearDatetimeNum:
        return xAxis
    tickDatetime = mpd.num2date(xAxis).astimezone(pytz.timezone('utc'))
    if tickDatetime.year >= 1900:
        return tickDatetime
    return xAxis
def initHistoricalData(self, symbol):
    """Load historical minute bars for symbol from the database and plot them.

    Queries MINUTE_DB_NAME for the symbol, converts each record's date
    string (YYYYMMDD) to a matplotlib date number, and fills the bar/OHLC/
    open-interest lists consumed by the K-line plot.

    NOTE(review): this snippet is Python 2 (print statement).
    """
    d = {}
    cx = self.mainEngine.dbQuery(MINUTE_DB_NAME, symbol, d)
    if cx:
        for data in cx:
            date = datetime.strptime(data['date'], "%Y%m%d")
            n = date2num(date)
            o = data['open'] # OHLC
            h = data['high']
            l = data['low']
            c = data['close']
            oi = data['openInterest']
            # NOTE(review): bars are stored as (n, o, c, l, h) -- confirm
            # the plotting code expects close before low/high.
            self.listBar.append((n, o, c, l, h))
            self.listOpen.append(o)
            self.listClose.append(c)
            self.listHigh.append(h)
            self.listLow.append(l)
            self.listOpenInterest.append(oi)
    self.initCompleted = True # historical data fully loaded
    print "initCompleted!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    self.plotKline() # draw the K-line chart
#----------------------------------------------------------------------
def interpolateOnGivenTimes(self,t):
    """
    Interpolate the time series onto a user-supplied time vector.

    Replaces the previous t and d fields: d is linearly interpolated at
    the new times (NaN outside the original range), and t becomes the
    provided vector.
    """
    source_times = [date2num(stamp) for stamp in self.t]
    interpolator = interp1d(source_times, self.d, kind='linear', bounds_error=False)
    target_times = [date2num(stamp) for stamp in t]
    self.d = interpolator(target_times)
    self.t = t
def createArrayData(self,ChannelList_obj) :
    """
    Create the np array data for table display, and update the
    ChannelList_obj. This function can be called from outside to update the
    table display
    """
    self.ChannelList_obj = ChannelList_obj
    # Column-major ('F') reshape: each concatenated source list becomes one
    # of the 10 display columns.  grav/sd are scaled by 1000 (presumably a
    # unit conversion -- confirm) and t is shown both as minutes elapsed
    # since the first sample and as the raw timestamp.
    self.arraydata=np.concatenate((ChannelList_obj.station,
        np.array(ChannelList_obj.grav)*1000,np.array(ChannelList_obj.sd)*1000,ChannelList_obj.tiltx,
        ChannelList_obj.tilty,ChannelList_obj.temp,ChannelList_obj.dur,
        ChannelList_obj.rej,
        (date2num(ChannelList_obj.t)-date2num(ChannelList_obj.t[0]))*24*60,
        np.array(ChannelList_obj.t))).reshape(len(ChannelList_obj.t),10,order='F')
def read_verticalperiodfile(c_Model, c_Var, planes=False):
    """Read one variable from a 3D post-processed model file.

    Locates the 'PP_<model>-3D*' file, reads c_Var (optionally on the cut
    planes configured in IncF.f_Make_Cuts), crops data and elevation to the
    configured start/end dates, and returns
    (f_Data, f_lat, f_lon, f_elev, d_Time, c_TimeFreq, c_Units).
    Returns None when no file or no data is found.
    """
    # Routine to read variable from hourly (HH) file
    c_Files = ops.find(IncF.c_ModelDir, 'PP_' + c_Model + '-3D*')
    if c_Files is None:
        return None
    c_File = c_Files[0]
    logging.info('Reading File: %s', c_File.split(os.sep)[-1])
    if planes:
        assert IncF.f_Make_Cuts  # To make sure this option was filled. This doesn't check format
        new_data = ncfile.read_ncfile3D(c_File, c_Var, IncF.f_Make_Cuts)
    else:
        new_data = ncfile.read_ncfile3D(c_File, c_Var)
    if new_data is not None:
        f_Data_aux, f_lat, f_lon, f_elev_aux, d_Time_aux, c_Units = new_data
    else:
        return None
    # CHECKING FOR TIME FREQUENCY BETWEEN TIME STEPS
    c_TimeFreq, Dt = delta_time_freq(d_Time_aux)
    # NOTE(review): on Python 3 map() returns a single-use iterator, yet it
    # is consumed twice below when planes=True -- confirm this code runs on
    # Python 2 or that time_crop materialises it.
    f_Time_aux = map(float, date2num(d_Time_aux))
    f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
    f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
    if planes:
        f_Data, d_Time = time_crop(f_date_i, f_date_f, Dt, f_Time_aux, f_Data_aux, multiple=True)
        f_elev, _ = time_crop(f_date_i, f_date_f, Dt, f_Time_aux, f_elev_aux, multiple=True)
    else:
        f_Data, d_Time = time_crop(f_date_i, f_date_f, Dt, f_Time_aux, f_Data_aux)
        f_elev, _ = time_crop(f_date_i, f_date_f, Dt, f_Time_aux, f_elev_aux)
    return f_Data, f_lat, f_lon, f_elev, d_Time, c_TimeFreq, c_Units
def read_periodfile(c_Model, c_Var):
    """Read one variable from a 2D post-processed model file.

    Locates the 'PP_<model>-2D*' file, reads c_Var, crops the data to the
    configured start/end dates and returns
    (f_Data, f_lat, f_lon, d_Time, c_ModelFile, c_TimeFreq, c_Units).
    Returns None when no file or no data is found.
    """
    # Routine to read variable from hourly (HH) file
    c_Files = ops.find(IncF.c_ModelDir, 'PP_' + c_Model + '-2D*')
    if c_Files is None:
        return None
    c_FileName = c_Files[0]
    c_ModelFile = c_FileName.split(os.sep)[-1]
    logging.info('Reading File: %s', c_ModelFile)
    new_data = ncfile.read_ncfile(c_FileName, c_Var)
    if new_data is not None:
        f_Data_aux, f_lat, f_lon, d_Time_aux, c_Units = new_data
    else:
        return None
    # CHECKING FOR TIME FREQUENCIES BETWEEN TIME STEPS
    c_TimeFreq, Dt = delta_time_freq(d_Time_aux)
    # NOTE(review): map() is an iterator on Python 3 -- see
    # read_verticalperiodfile; confirm time_crop tolerates it.
    f_Time_aux = map(float, date2num(d_Time_aux))
    f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
    f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
    f_Data, d_Time = time_crop(f_date_i, f_date_f, Dt, f_Time_aux, f_Data_aux)
    return f_Data, f_lat, f_lon, d_Time, c_ModelFile, c_TimeFreq, c_Units
# Auxiliary functions
def perform_read(model_name, var_name, dest_dict, read_routine):
    """
    Retrieve data from read_routine and format it into dest_dict.

    On the first call for a model, stores the model metadata (lat/lon,
    times, frequency) and an empty unit table; on later calls stores the
    variable's data and units.  When read_routine yields no data, a
    warning is logged and dest_dict is left untouched (this also covers
    the previously-unhandled None on the second branch -- the old TODO).

    :param model_name: String. The name of the model.
    :param var_name: String. The name of the variable.
    :param dest_dict: Dictionary. Where data will be stored.
    :param read_routine: lambda wrapping the read routine; called with
        (model_name, var_name) and returning a 7-tuple or None.
    :return: None.  dest_dict is updated in place.
    """
    new_data = read_routine((model_name, var_name))
    if new_data is None:
        logging.warning('No data for model %s and variable %s', model_name, var_name)
        return  # stop here: nothing to store
    f_data, f_lat, f_lon, d_time, c_ModelFile, c_TimeFreq, c_Units = new_data
    if model_name not in dest_dict:
        # First sighting of this model: record metadata only (variable
        # data is stored on subsequent calls, matching the original flow).
        t_temp = {'f_Lat': f_lat,
                  'f_Lon': f_lon,
                  'd_Time': np.array(d_time),
                  'f_Time': date2num(d_time),
                  'c_TimeFreq': c_TimeFreq}
        dest_dict[model_name] = dict(t_temp)
        dest_dict[model_name]['t_Units'] = dict()
    else:
        dest_dict[model_name][var_name] = f_data
        dest_dict[model_name]['t_Units'][var_name] = c_Units
def read_mixedlayer(self, c_Network):
    """Read boundary-layer height observations (PBLHmunoz.txt) for a network.

    Builds an hourly time axis over the configured start/end period and
    places the observed values on it; hours without an observation keep
    IncF.f_FillValue.  Implicitly returns None when PBLHmunoz.txt is not
    present in the network's data directory.

    :return: (f_Data, f_Stntime) -- values and matplotlib time numbers
    """
    c_ObsNetDirs = IncDir.c_ObsNetDir
    c_ObsNetName = IncDir.c_ObsNetName
    idx_Net = c_ObsNetName.index(c_Network)
    c_Files = os.listdir(c_ObsNetDirs[idx_Net])
    logging.info('Data Directory: %s' % c_ObsNetDirs[idx_Net])
    i_count = 0
    if 'PBLHmunoz.txt' in c_Files:
        c_FileName = 'PBLHmunoz.txt'
        logging.info('Reading File: %s' % c_FileName)
        f_AuxData = []
        d_AuxDate = []
        with open(c_ObsNetDirs[idx_Net] + c_FileName, 'r') as f:
            for line in (row.split(',') for row in f):
                # i_count > 0 skips the header row.
                if i_count > 0:
                    f_AuxData.append(float(line[3]))
                    d_AuxDate.append(dt.datetime.strptime(line[0], '%d-%m-%Y_%H:%M'))
                i_count += 1
        f_AuxDate = date2num(d_AuxDate)
        f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
        f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
        # Hourly station time axis over the full period (+23/24 so the last
        # day's hours are included).
        f_Stntime = []
        d_Stntime = []
        f_date_aux = f_date_i
        d_date_aux = num2date(f_date_i)
        while f_date_aux <= f_date_f + 23 / 24.:
            f_Stntime.append(date2num(d_date_aux))
            d_Stntime.append(d_date_aux)
            d_date_aux = d_date_aux + dt.timedelta(hours=1)
            f_date_aux = date2num(d_date_aux)
        # Fill with the fill value, then drop observations onto matching
        # hours.  NOTE(review): np.in1d matches float times exactly --
        # assumes both axes round identically; confirm.
        f_Data = np.empty(len(f_Stntime))
        f_Data.fill(IncF.f_FillValue)
        f_Data[np.in1d(f_Stntime, f_AuxDate)] = f_AuxData
        return f_Data, f_Stntime
def read_mcpheedata(self, c_FileName, i_ncol):
    """Read one data column from a McPhee CSV file onto an hourly time axis.

    Values use ',' as the decimal separator; timestamps are shifted by +4h
    and then by IncF.i_TimeZone hours.  Hours without data keep
    IncF.f_FillValue.

    :param c_FileName: CSV file to read (the first 4 lines are skipped)
    :param i_ncol: index of the column holding the values
    :return: (f_VarData, f_Stntime, d_Stntime)
    """
    f_data_aux = []
    f_date_aux = []
    d_date_aux = []
    f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
    f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
    # Hourly station time axis spanning the configured period.
    f_Stntime = []
    d_Stntime = []
    f_time_aux = f_date_i
    d_time_aux = num2date(f_date_i)
    while f_time_aux <= f_date_f + 23 / 24.:
        f_Stntime.append(date2num(d_time_aux))
        d_Stntime.append(d_time_aux)
        d_time_aux = d_time_aux + dt.timedelta(hours=1)
        f_time_aux = date2num(d_time_aux)
    with open(c_FileName, 'r') as f:
        file_data = csv.reader(f, delimiter=',')
        # Skip the 4 header lines.
        for f_row in list(file_data)[4::]:
            # Decimal comma -> decimal point; empty cells become fill values.
            c_Value = f_row[i_ncol].replace(',', '.')
            if c_Value == '':
                f_data_aux.append(IncF.f_FillValue)
            else:
                f_data_aux.append(float(c_Value))
            # File time +4h (presumably to UTC -- confirm), then into the
            # configured time zone.
            d_date_utc = dt.datetime.strptime(f_row[0], '%d-%m-%Y %H:%M') + dt.timedelta(hours=4)
            d_date_local = d_date_utc + dt.timedelta(hours=IncF.i_TimeZone)
            d_date_aux.append(d_date_local)
            f_date_aux.append(date2num(d_date_local))
        f.close()  # redundant inside the with-block, but harmless
    # NOTE(review): i_start/i_end are computed but never used below.
    i_start = np.where(np.array(f_date_aux) >= f_date_i)[0][0]
    i_end = np.where(np.array(f_date_aux) <= f_date_f)[-1][-1]
    f_VarData = np.empty(len(f_Stntime))
    f_VarData.fill(IncF.f_FillValue)
    # Align observations with the hourly axis by exact float-time match.
    f_VarData[np.in1d(f_Stntime, f_date_aux)] = np.array(f_data_aux)[np.in1d(f_date_aux, f_Stntime)]
    return f_VarData, f_Stntime, d_Stntime
def setxlim(self, size):
    """Set the x-axis window to the requested WindowSize ending at the
    latest data point, then rescale y and redraw.

    ONEDAY/FIVEDAY are no-ops (they would require per-minute quotes).
    """
    if self.main_x is None or self.main_y is None:
        return
    xmax = max(self.main_x)
    date = mdates.num2date(xmax).date()
    if size in (WindowSize.ONEDAY, WindowSize.FIVEDAY):
        return  # requires per-minute quotes
    # Window length in days for each fixed-span size.
    day_spans = {
        WindowSize.ONEMONTH: 30,
        WindowSize.THREEMONTH: 90,
        WindowSize.SIXMONTH: 180,
        WindowSize.ONEYEAR: 365,
        WindowSize.TWOYEAR: 365 * 2,
        WindowSize.FIVEYEAR: 365 * 5,
    }
    if size == WindowSize.MAX:
        xmin = min(self.main_x)
    else:
        xmin = mdates.date2num(date - timedelta(days=day_spans[size]))
    self.axes.set_xlim([xmin, xmax])
    self.adjust_ylim(xmin, xmax)
    self.fig.canvas.draw()
def on_loadQuoteClicked(self):
    """Ask the user for a quote CSV, normalise it and draw it.

    Column names are lower-cased; the 'datetime' (or 'date') column is
    sorted and converted to matplotlib date numbers before the frame is
    handed to the plot widget.

    NOTE(review): uses Python-2-only names (unicode) and the long-removed
    pandas DataFrame.sort -- this snippet predates Python 3 / modern pandas.
    """
    logger.info('load quote')
    fileName = QtGui.QFileDialog.getOpenFileName(
        self, self.tr("Open Quote Data"), data_path,
        self.tr("Quote Files (*.csv)"))
    logger.info("Filename %s" % fileName)
    if os.path.isfile(fileName):
        df = pd.read_csv(unicode(fileName))
        df.columns = [col.lower() for col in df.columns]
        if 'datetime' in df.columns:
            df = df.sort(['datetime'])
            df['datetime'] = df.apply(
                lambda row: mdates.date2num(parser.parse(row['datetime'])),
                axis=1)
        elif 'date' in df.columns:
            # No 'datetime' column: derive one from 'date'.
            df = df.sort(['date'])
            df['datetime'] = df.apply(
                lambda row: mdates.date2num(parser.parse(row['date'])),
                axis=1)
        if 'datetime' in df.columns and not df['datetime'].empty:
            self.ui_controller.matplotlibWidget.set_data(df)
            self.ui_controller.matplotlibWidget.draw_data()
            self.df = df
def plot_tick_range_normalised(tick_path, range_start, range_end):
    """Plot min-max normalised mid prices for a range of a tick file.

    The mid price ((bid + ask) / 2) of every tick between range_start and
    range_end is rescaled to [0, 1] and drawn with plot_date.
    """
    if not os.path.exists(tick_path):
        print(tick_path + ' file doesnt exist')
        # NOTE(review): quit() terminates the interpreter; raising an
        # exception would be kinder to callers, kept for compatibility.
        quit()
    df = pd.read_csv(tick_path, usecols=['RateDateTime', 'RateBid', 'RateAsk'])
    start_index = tfh.find_index_closest_date(range_start, tick_path)
    end_index = tfh.find_index_closest_date(range_end, tick_path)
    # don't proceed if we didn't find indices
    if start_index is None or end_index is None:
        print('start_index or end_index was None')
        quit()
    ticks_s = df.iloc[start_index:end_index]
    ticks = (ticks_s['RateAsk'] + ticks_s['RateBid']) / 2.0
    # Min-max normalisation into [0, 1].
    ticks_norm = (ticks - ticks.min()) / (ticks.max() - ticks.min())
    # Timestamps carry fractional seconds; strip them before parsing.
    dates_dt = [dt.datetime.strptime(x.split('.')[0], '%Y-%m-%d %H:%M:%S')
                for x in ticks_s['RateDateTime'].values]
    dates = mdates.date2num(dates_dt)
    plt.plot_date(dates, ticks_norm, 'b-')
def getRDT(self):
    """
    a.RDT or a.RDT()
    Convert the stored UTC times to Rata Die time: fractional days
    elapsed since 0001-01-01 (matplotlib's date2num convention).

    Returns
    ========
    out : numpy array
        elapsed days since 1/1/1

    Examples
    ========
    >>> a = Ticktock('2002-02-02T12:00:00', 'ISO')
    >>> a.RDT
    array([ 730883.5])

    See Also
    =========
    getUTC, getUNX, getISO, getJD, getMJD, getCDF, getTAI, getDOY, geteDOY
    """
    from matplotlib.dates import date2num, num2date
    # Cache the result on the instance before returning it.
    self.RDT = datamodel.dmarray(date2num(self.UTC))
    return self.RDT
# -----------------------------------------------
# Source: converter.py, project PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda (author: SignalMedia)
def _dt_to_float_ordinal(dt):
    """
    Convert :mod:`datetime` to the Gregorian date as UTC float days,
    preserving hours, minutes, seconds and microseconds. Return value
    is a :func:`float`.
    """
    # Vectorised fast path for datetime64[ns] arrays/indexes; everything
    # else goes through matplotlib's generic converter.
    is_ns_array = (isinstance(dt, (np.ndarray, Index, Series))
                   and com.is_datetime64_ns_dtype(dt))
    if is_ns_array:
        return dates.epoch2num(dt.asi8 / 1.0E9)
    return dates.date2num(dt)
# Datetime Conversion
# Source: converter.py, project PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda (author: SignalMedia)
def convert(values, unit, axis):
    """Convert datetime-like values into matplotlib float date numbers.

    Dispatches on the runtime type of `values`: datetime/date scalars,
    numpy datetime64, time-of-day, plain numbers (passed through),
    strings (parsed best-effort) and list-likes (converted element-wise
    when possible).  Unconvertible values are returned unchanged.
    """
    def try_parse(values):
        # Best-effort string parse; fall back to the raw value on failure.
        try:
            return _dt_to_float_ordinal(tools.to_datetime(values))
        except Exception:
            return values
    if isinstance(values, (datetime, pydt.date)):
        return _dt_to_float_ordinal(values)
    elif isinstance(values, np.datetime64):
        return _dt_to_float_ordinal(lib.Timestamp(values))
    elif isinstance(values, pydt.time):
        return dates.date2num(values)
    elif (com.is_integer(values) or com.is_float(values)):
        # Plain numbers are assumed to already be float ordinals.
        return values
    elif isinstance(values, compat.string_types):
        return try_parse(values)
    elif isinstance(values, (list, tuple, np.ndarray, Index)):
        if isinstance(values, Index):
            values = values.values
        if not isinstance(values, np.ndarray):
            values = com._asarray_tuplesafe(values)
        if com.is_integer_dtype(values) or com.is_float_dtype(values):
            # Numeric arrays are assumed to already be ordinals.
            return values
        try:
            values = tools.to_datetime(values)
            if isinstance(values, Index):
                values = values.map(_dt_to_float_ordinal)
            else:
                values = [_dt_to_float_ordinal(x) for x in values]
        except Exception:
            # Leave unconvertible containers untouched.
            pass
        return values
# Source: converter.py, project PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda (author: SignalMedia)
def autoscale(self):
    """
    Set the view limits to include the data range.

    Fetches the data limits as datetimes, orders them, and returns the
    nonsingular-adjusted float date-number pair.
    """
    dmin, dmax = self.datalim_to_dt()
    if dmin > dmax:
        dmax, dmin = dmin, dmax
    # The previous version re-fetched datalim_to_dt() here, which threw
    # away the swap above and could hand nonsingular() a reversed range.
    vmin = dates.date2num(dmin)
    vmax = dates.date2num(dmax)
    return self.nonsingular(vmin, vmax)
def get_mpl_time(ds, *freq):
    """Return a float which is usable for plt.plot_date from matplotlib.

    :param ds: core_faam dataset
    :type ds: netCDF4.Dataset
    :param freq: frequency of the time stamp, default=1; if freq > 1 a
        multidimensional array is returned
    :return: array containing the matplotlib timestamps
    :rtype: numpy.array

    >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
    >>> t_1hz = get_mpl_time(ds)
    >>> t_1hz.shape
    Out[1]: (37137,)
    >>> t_32hz = get_mpl_time(ds, 32)
    >>> t_32hz.shape
    Out[1]: (37137, 32)
    >>> plot_date(t_32hz.ravel(), ds.variables['U_C'][:].ravel(), 'b-')
    >>>
    """
    # The time variable has gone by several names over the years
    # ('PARA0515' in old core files).
    vtime = None
    for key in ('Time', 'time', 'TIME', 'PARA0515'):
        if key in ds.variables.keys():
            vtime = ds.variables[key][:]
            break
    if vtime is None:
        return None
    vtime = np.array(vtime)
    if freq:
        # Expand each second into freq sub-samples: one column per sample,
        # offset by k/freq of a second.
        rows = len(vtime)
        vtime = (vtime.repeat(freq[0]).reshape((rows, freq[0]))
                 + np.array(range(freq[0]), dtype=np.float64) / freq[0])
    # Seconds -> days, anchored at the flight's base time.
    return np.float64(vtime / 86400.) + np.float64(date2num(get_base_time(ds)))
def get_mpl_time(ds, basetime=None, freq=1):
    """Return a matplotlib usable time format from the faam core netCDF4.

    :param ds: dataset (netCDF4.Dataset-like with .variables, or a dict
        holding a 'Time' entry)
    :param basetime: reference datetime; computed via get_base_time(ds)
        when not supplied
    :param freq: samples per second; freq > 1 yields a (rows, freq) array

    >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
    >>> t_1hz = get_mpl_time(ds)
    >>> t_1hz.shape
    Out[1]: (37137,)
    >>> t_32hz = get_mpl_time(ds, 32)
    >>> t_32hz.shape
    Out[1]: (37137, 32)
    >>> plot_date(t_32hz.ravel(), ds.variables['U_C'][:].ravel(), 'b-')
    >>>
    """
    import numpy as np
    vtime = None
    if hasattr(ds, 'variables'):
        # The time variable has gone by several names over the years
        # ('PARA0515' in old core files).
        for key in ('Time', 'time', 'TIME', 'PARA0515'):
            if key in ds.variables.keys():
                vtime = ds.variables[key][:]
                break
    elif isinstance(ds, dict):
        if 'Time' in ds:  # dict.has_key() no longer exists on Python 3
            vtime = ds['Time']
    if vtime is None:
        # Previously a Dataset without any recognised time variable left
        # vtime unbound and raised NameError; return None like the other
        # unrecognised-input cases instead.
        return None
    # Expand each second into freq sub-samples, offset by k/freq seconds.
    rows = len(vtime)
    vtime = vtime.repeat(freq).reshape((rows, freq)) + np.array(range(freq), dtype=np.float64) / freq
    if not basetime:
        basetime = get_base_time(ds)
    # Seconds -> days, anchored at basetime.
    result = np.float64(vtime / 86400.) + np.float64(date2num(basetime))
    return result
def _format_date(self, tdoy):
    """Return tdoy's datetime object converted to a matplotlib date number."""
    return mdates.date2num(tdoy.dtobject)