Python num2date() usage examples (source code)
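matplotlib.dates.num2date() converts matplotlib date numbers (floating-point days since the library's date epoch) back into datetime.datetime objects, and date2num() is its inverse. A minimal round-trip sketch (illustrative only, not taken from any of the projects below):

import datetime as dt
from matplotlib.dates import date2num, num2date

d = dt.datetime(2017, 6, 1, 12, 30)
x = date2num(d)        # float: days since matplotlib's date epoch
d_back = num2date(x)   # timezone-aware datetime (UTC by default)
print(x, d_back)

The snippets below show how different projects use num2date() to build time axes, format date ticks, and turn stored numeric timestamps back into datetimes.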

Model.py (project: MOSPAT, author: CR2MOS)
def timevect(d_StartDate, d_EndDate, c_TimeFreq, DT=None):
    f_Time = []
    d_Time = []
    while d_StartDate <= d_EndDate:
        d_Time.append(d_StartDate)
        f_Time.append(date2num(d_StartDate))
        f_Date_aux = date2num(d_StartDate)
        if c_TimeFreq == 'Monthly':
            DT_aux = monthrange(num2date(f_Date_aux).year, num2date(f_Date_aux).month)[1]
            DT = dt.timedelta(days=DT_aux)
        elif c_TimeFreq == 'Yearly':
            # choose 365 or 366 days depending on whether the coming year
            # interval spans a leap day (checks year + 1, so it assumes the
            # start date falls after February)
            if isleap(d_StartDate.year + 1):
                DT = dt.timedelta(days=366)
            else:
                DT = dt.timedelta(days=365)
        d_StartDate += DT

    return f_Time, d_Time
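
A minimal usage sketch for timevect above (assumes this module's imports: date2num and num2date from matplotlib.dates, monthrange and isleap from calendar, and datetime imported as dt):

import datetime as dt

# Build parallel lists of matplotlib date numbers and datetimes, stepping one
# calendar month at a time; for other frequencies pass DT explicitly,
# e.g. DT=dt.timedelta(hours=1).
f_Time, d_Time = timevect(dt.datetime(2015, 1, 1), dt.datetime(2015, 12, 31), 'Monthly')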
Ceilometer.py (project: MOSPAT, author: CR2MOS)
def read(self, c_ObsVar):
        c_Network = self.name
        t_VarUnits = {'ZBK': 'bu', 'PBLH': 'm'}
        t_ObsStationData = dict()
        if c_ObsVar == 'ZBK' or c_ObsVar == 'PBLH':
            if c_ObsVar == 'ZBK':
                f_data, f_time, f_height = self.read_ceilfile(c_Network)
                t_ObsStationData['f_Height'] = f_height
            if c_ObsVar == 'PBLH':
                f_data, f_time = self.read_mixedlayer(c_Network)
            t_ObsStationData[c_ObsVar] = np.array([f_data])
            t_ObsStationData['c_StationName'] = np.array(['DGF'])
            t_ObsStationData['f_Time'] = np.array([f_time])
            t_ObsStationData['d_Time'] = np.array([num2date(f_time)])
            t_ObsStationData['f_Lat'] = np.array([-33.457])
            t_ObsStationData['f_Lon'] = np.array([-70.661])
            t_ObsStationData['f_Elevation'] = np.array([9999])
            t_ObsStationData['t_Units'] = dict()
            if c_ObsVar in t_VarUnits:
                t_ObsStationData['t_Units'][c_ObsVar] = t_VarUnits[c_ObsVar]
            else:
                t_ObsStationData['t_Units'][c_ObsVar] = None
            return t_ObsStationData, 'Hourly'
        else:
            return t_ObsStationData, None
breaks.py (project: mizani, author: has2k1)
def __call__(self, limits):
        """
        Compute breaks

        Parameters
        ----------
        limits : tuple
            Minimum and maximum :class:`datetime.datetime` values.

        Returns
        -------
        out : array_like
            Sequence of break points.
        """
        if any(pd.isnull(x) for x in limits):
            return []

        ret = self.locator.tick_values(*limits)
        # MPL returns the tick_values in ordinal format,
        # but we return them in the same space as the
        # inputs.
        return [num2date(val) for val in ret]
uiCrosshair.py (project: InplusTrader_Linux, author: zhengwsh)
def __getTickDatetimeByXPosition(self,xAxis):
        """mid
        ????????datetimeNum??x?
        ????view????????x???????tick?time?xAxis???index?????datetime??????datetimeNum
        return:str
        """        
        tickDatetimeRet = xAxis
        minYearDatetimeNum = mpd.date2num(dt.datetime(1900,1,1))
        if(xAxis > minYearDatetimeNum):
            tickDatetime = mpd.num2date(xAxis).astimezone(pytz.timezone('utc'))
            if(tickDatetime.year >=1900):
                tickDatetimeRet = tickDatetime 
        return tickDatetimeRet       

    #----------------------------------------------------------------------
monitor_tick_main.py (project: InplusTrader_Linux, author: zhengwsh)
def getTickDatetimeByXPosition(self,xAxis):
        """mid
        ?????x??????????????
        """
        tickDatetimeRet = xAxis
        minYearDatetimeNum = mpd.date2num(dt.datetime(1900,1,1))
        if(xAxis > minYearDatetimeNum):
            tickDatetime = mpd.num2date(xAxis).astimezone(pytz.timezone('utc'))
            if(tickDatetime.year >=1900):
                tickDatetimeRet = tickDatetime 
        return tickDatetimeRet
htsPlot.py (project: htsprophet, author: CollinRooney12)
def plotYearly(dictframe, ax, uncertainty, color='#0072B2'):

    if ax is None:
        figY = plt.figure(facecolor='w', figsize=(10, 6))
        ax = figY.add_subplot(111)
    else:
        figY = ax.get_figure()
    ##
    # Find the max index for an entry of each month
    ##
    months = dictframe.ds.dt.month
    ind = []
    for month in range(1,13):
        ind.append(max(months[months == month].index.tolist()))
    ##
    # Plot from the minimum of those maximums on (this will almost certainly result in only 1 year plotted)
    ##
    ax.plot(dictframe['ds'][min(ind):], dictframe['yearly'][min(ind):], ls='-', c=color)
    if uncertainty:
        ax.fill_between(dictframe['ds'].values[min(ind):], dictframe['yearly_lower'][min(ind):], dictframe['yearly_upper'][min(ind):], color=color, alpha=0.2)
    ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
    months = MonthLocator(range(1, 13), bymonthday=1, interval=2)
    ax.xaxis.set_major_formatter(FuncFormatter(
        lambda x, pos=None: '{dt:%B} {dt.day}'.format(dt=num2date(x))))
    ax.xaxis.set_major_locator(months)
    ax.set_xlabel('Day of year')
    ax.set_ylabel('yearly')
    figY.tight_layout()
    return figY
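
The FuncFormatter plus num2date pattern above (also used in the prophet snippets further down) is a common way to put readable dates on an axis whose data are matplotlib date numbers. A small standalone sketch, assuming only matplotlib and the standard library:

import datetime as dt
import matplotlib.pyplot as plt
from matplotlib.dates import MonthLocator, date2num, num2date
from matplotlib.ticker import FuncFormatter

fig, ax = plt.subplots()
# x values are matplotlib date numbers, one per month
xs = [date2num(dt.datetime(2017, m, 1)) for m in range(1, 13)]
ax.plot(xs, range(12), ls='-')
ax.xaxis.set_major_locator(MonthLocator(range(1, 13), bymonthday=1, interval=2))
ax.xaxis.set_major_formatter(FuncFormatter(
    lambda x, pos=None: '{dt:%B} {dt.day}'.format(dt=num2date(x))))
fig.tight_layout()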
pyGrav_main.py (project: pyGrav, author: basileh)
def readMCGraviOutputfiles(self,datafordriftadj,output_root_dir,pattern="mix*",output_file_pattern="*.gra"):
        """
        Read the mcgravi output files (matching output_file_pattern) from the
        mix_* output folders produced by mcgravi.

        Populate each survey's output_dic dictionary. Keyed by station number,
        each entry holds the tuple:
        (station, gravity, std)
        """
        for survid,surv in datafordriftadj.survey_dic.iteritems():
            if surv.keepitem==1:
                survdir=output_root_dir+os.sep+surv.name     
                #identify every folders matching the given pattern
                folders=glob.glob(survdir+os.sep+pattern)
                #sort the folder list (mcgravi outputs folder names with prog execution date...)
                folders.sort()
                #get the last one
                folder=folders.pop()

                mcgravi_filename=glob.glob(folder+os.sep+output_file_pattern)

                #fill in the survey object:
                self.campaigndata.survey_dic[survid].read_from_mcgravi_output_file(mcgravi_filename[0])
                print "For survey: %s"%(num2date(survid))
                print "Station , g (mgal), SD (mgal)"                
                for tupid,tup in self.campaigndata.survey_dic[survid].output_dic.iteritems():
                    print "%d, %7.4f, %7.4f"%(tup)
Ceilometer.py (project: MOSPAT, author: CR2MOS)
def read_mixedlayer(self, c_Network):
        c_ObsNetDirs = IncDir.c_ObsNetDir
        c_ObsNetName = IncDir.c_ObsNetName
        idx_Net = c_ObsNetName.index(c_Network)
        c_Files = os.listdir(c_ObsNetDirs[idx_Net])
        logging.info('Data Directory: %s' % c_ObsNetDirs[idx_Net])
        i_count = 0
        if 'PBLHmunoz.txt' in c_Files:
            c_FileName = 'PBLHmunoz.txt'
            logging.info('Reading File: %s' % c_FileName)
            f_AuxData = []
            d_AuxDate = []
            with open(c_ObsNetDirs[idx_Net] + c_FileName, 'r') as f:
                for line in (row.split(',') for row in f):
                    if i_count > 0:
                        f_AuxData.append(float(line[3]))
                        d_AuxDate.append(dt.datetime.strptime(line[0], '%d-%m-%Y_%H:%M'))
                    i_count += 1
            f_AuxDate = date2num(d_AuxDate)
            f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
            f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
            f_Stntime = []
            d_Stntime = []
            f_date_aux = f_date_i
            d_date_aux = num2date(f_date_i)
            while f_date_aux <= f_date_f + 23 / 24.:
                f_Stntime.append(date2num(d_date_aux))
                d_Stntime.append(d_date_aux)
                d_date_aux = d_date_aux + dt.timedelta(hours=1)
                f_date_aux = date2num(d_date_aux)
            f_Data = np.empty(len(f_Stntime))
            f_Data.fill(IncF.f_FillValue)
            f_Data[np.in1d(f_Stntime, f_AuxDate)] = f_AuxData
            return f_Data, f_Stntime
McPhee.py (project: MOSPAT, author: CR2MOS)
def read_mcpheedata(self, c_FileName, i_ncol):
        f_data_aux = []
        f_date_aux = []
        d_date_aux = []
        f_date_i = date2num(dt.datetime.strptime(IncF.c_Start_Date[0], '%d-%m-%Y'))
        f_date_f = date2num(dt.datetime.strptime(IncF.c_Last_Date[0], '%d-%m-%Y'))
        f_Stntime = []
        d_Stntime = []
        f_time_aux = f_date_i
        d_time_aux = num2date(f_date_i)

        while f_time_aux <= f_date_f + 23 / 24.:
            f_Stntime.append(date2num(d_time_aux))
            d_Stntime.append(d_time_aux)
            d_time_aux = d_time_aux + dt.timedelta(hours=1)
            f_time_aux = date2num(d_time_aux)

        with open(c_FileName, 'r') as f:
            file_data = csv.reader(f, delimiter=',')
            for f_row in list(file_data)[4::]:
                c_Value = f_row[i_ncol].replace(',', '.')
                if c_Value == '':
                    f_data_aux.append(IncF.f_FillValue)
                else:
                    f_data_aux.append(float(c_Value))
                d_date_utc = dt.datetime.strptime(f_row[0], '%d-%m-%Y %H:%M') + dt.timedelta(hours=4)
                d_date_local = d_date_utc + dt.timedelta(hours=IncF.i_TimeZone)
                d_date_aux.append(d_date_local)
                f_date_aux.append(date2num(d_date_local))
            f.close()
            i_start = np.where(np.array(f_date_aux) >= f_date_i)[0][0]
            i_end = np.where(np.array(f_date_aux) <= f_date_f)[-1][-1]
            f_VarData = np.empty(len(f_Stntime))
            f_VarData.fill(IncF.f_FillValue)
            f_VarData[np.in1d(f_Stntime, f_date_aux)] = np.array(f_data_aux)[np.in1d(f_date_aux, f_Stntime)]

        return f_VarData, f_Stntime, d_Stntime
matplotlibwidget.py (project: quantdigger, author: andyzsf)
def mouse_move(self, event):
        if not event.inaxes: return
        if self.x is None or self.y is None: return
        x, y = event.xdata, event.ydata
        idx = np.searchsorted(self.x, x)
        if idx >= len(self.x): return
        x = self.x[idx]
        y = self.y[idx]
        # update the crosshair line positions
        self.lx.set_ydata(y)
        self.ly.set_xdata(x)

        text = []
        open = self.open[idx] if self.open is not None and idx < len(self.open) else None
        close = self.close[idx] if self.close is not None and idx < len(self.close) else None
        high = self.high[idx] if self.high is not None and idx < len(self.high) else None
        low = self.low[idx] if self.low is not None and idx < len(self.low) else None
        vol = self.vol[idx] if self.vol is not None and idx < len(self.vol) else None
        day = mdates.num2date(x)
        if day.time() == time(0,0):
            date_str = datetime.strftime(day, '%b %d %Y')
        else:
            date_str = datetime.strftime(day, '%b %d %Y %H:%M:%S')
        text.append("{0:>5s} {1:<12s}".format('Date', date_str))
        if open:
            text.append("{0:>5s} {1:.2f}".format('Open', open))
        if close:
            text.append("{0:>5s} {1:.2f}".format('Close', close))
        if high:
            text.append("{0:>5s} {1:.2f}".format('High', high))
        if low:
            text.append("{0:>5s} {1:.2f}".format('Low', low))
        if vol:
            text.append("{0:>5s} {1:.2f}M".format('Vol', (float(vol)/1000000)))
        self.txt.set_text('\n'.join(text))
matplotlibwidget.py (project: quantdigger, author: andyzsf)
def setxlim(self, size):
        if self.main_x is None or self.main_y is None: return
        xmax = max(self.main_x)
        date = mdates.num2date(xmax).date()
        if size == WindowSize.ONEDAY:
            return # requires per min quotes
        elif size == WindowSize.FIVEDAY:
            return # requires per min quotes
        elif size == WindowSize.ONEMONTH:
            xmin = mdates.date2num(date-timedelta(days=30))
        elif size == WindowSize.THREEMONTH:
            xmin = mdates.date2num(date-timedelta(days=90))
        elif size == WindowSize.SIXMONTH:
            xmin = mdates.date2num(date-timedelta(days=180))
        elif size == WindowSize.ONEYEAR:
            xmin = mdates.date2num(date-timedelta(days=365))
        elif size == WindowSize.TWOYEAR:
            xmin = mdates.date2num(date-timedelta(days=365*2))
        elif size == WindowSize.FIVEYEAR:
            xmin = mdates.date2num(date-timedelta(days=365*5))
        elif size == WindowSize.MAX:
            xmin = min(self.main_x)

        self.axes.set_xlim([xmin, xmax])
        self.adjust_ylim(xmin, xmax)
        self.fig.canvas.draw()
time.py (project: heliopy, author: heliopython)
def getRDT(self):
        """
        a.RDT or a.RDT()

        convert dtype data into Rata Die (lat.) Time (days since 1/1/0001)

        Returns
        ========
        out : numpy array
            elapsed days since 1/1/1

        Examples
        ========
        >>> a = Ticktock('2002-02-02T12:00:00', 'ISO')
        >>> a.RDT
        array([ 730883.5])

        See Also
        =========
        getUTC, getUNX, getISO, getJD, getMJD, getCDF, getTAI, getDOY, geteDOY

        """
        from matplotlib.dates import date2num, num2date
        #        import matplotlib.dates as mpd

        # nTAI = len(self.data)
        UTC = self.UTC
        #RDT = np.zeros(nTAI)
        RDT = datamodel.dmarray(date2num(UTC))
        #for i in np.arange(nTAI):
            #RDT[i] = UTC[i].toordinal() + UTC[i].hour/24. + UTC[i].minute/1440. + \
                #UTC[i].second/86400. + UTC[i].microsecond/86400000000.

        self.RDT = RDT
        return self.RDT

    # -----------------------------------------------
ui.py (project: autoxd, author: nessessary)
def weekday_candlestick(ohlc_data, ax, fmt='%b %d', freq=7, **kwargs):
    """ Wrapper function for matplotlib.finance.candlestick_ohlc
        that artificially spaces data to avoid gaps from weekends 
    ??????????
    fmt: ????
    freq: ???????
    """

    # Convert data to numpy array
    ohlc_data_arr = np.array(ohlc_data)
    ohlc_data_arr2 = np.hstack(
        [np.arange(ohlc_data_arr[:,0].size)[:,np.newaxis], ohlc_data_arr[:,1:]])
    ndays = ohlc_data_arr2[:,0]  # array([0, 1, 2, ... n-2, n-1, n])

    # Convert matplotlib date numbers to strings based on `fmt`
    dates = mdates.num2date(ohlc_data_arr[:,0])
    date_strings = []
    for date in dates:
        date_strings.append(date.strftime(fmt))

    # Plot candlestick chart
    mpf.candlestick_ohlc(ax, ohlc_data_arr2, **kwargs)

    # Format x axis
    ax.set_xticks(ndays[::freq])
    ax.set_xticklabels(date_strings[::freq], rotation=45, ha='right')
    ax.set_xlim(ndays.min(), ndays.max())
forecaster.py (project: prophet, author: facebook)
def plot_yearly(self, ax=None, uncertainty=True, yearly_start=0):
        """Plot the yearly component of the forecast.

        Parameters
        ----------
        ax: Optional matplotlib Axes to plot on. One will be created if
            this is not provided.
        uncertainty: Optional boolean to plot uncertainty intervals.
        yearly_start: Optional int specifying the start day of the yearly
            seasonality plot. 0 (default) starts the year on Jan 1. 1 shifts
            by 1 day to Jan 2, and so on.

        Returns
        -------
        a list of matplotlib artists
        """
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        # Compute yearly seasonality for a Jan 1 - Dec 31 sequence of dates.
        days = (pd.date_range(start='2017-01-01', periods=365) +
                pd.Timedelta(days=yearly_start))
        df_y = self.seasonality_plot_df(days)
        seas = self.predict_seasonal_components(df_y)
        artists += ax.plot(
            df_y['ds'].dt.to_pydatetime(), seas['yearly'], ls='-', c='#0072B2')
        if uncertainty:
            artists += [ax.fill_between(
                df_y['ds'].dt.to_pydatetime(), seas['yearly_lower'],
                seas['yearly_upper'], color='#0072B2', alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        months = MonthLocator(range(1, 13), bymonthday=1, interval=2)
        ax.xaxis.set_major_formatter(FuncFormatter(
            lambda x, pos=None: '{dt:%B} {dt.day}'.format(dt=num2date(x))))
        ax.xaxis.set_major_locator(months)
        ax.set_xlabel('Day of year')
        ax.set_ylabel('yearly')
        return artists
utils.py (project: faampy, author: ncasuk)
def core_to_pandas(ds):
    """converts a netCDF4.Dataset into a pandas Dataframe using the timestamp
    as index.

    ..note: Only the first column of the two dimensional data set is grabbed,
            because of performance issues.

    :param ds: core_faam dataset
    :type param: netCDF4.Dataset
    :return: pandas.Dataframe
    :type return: pandas.Dataframe

    """

    # TODO: make it work for the full dataset too
    vars=sorted(ds.variables.keys())
    vars.remove('Time')

    index=get_mpl_time(ds, 1)
    index=num2date(index.ravel())
    #initialize an empty Dataframe
    df=pd.DataFrame(index=index)
    for v in vars:
        shp=ds.variables[v].shape
        if len(shp) ==2:
            data=np.copy(ds.variables[v][:,0].data).ravel()
        else:
            data=np.copy(ds.variables[v][:].data)
        df_tmp=pd.DataFrame(data[:], index=index, columns=[v,])
        df = pd.concat([df, df_tmp], axis=1)
    #set all missing values to nan
    df[df == -9999.0] = np.nan
    #set timezone to None otherwise there might be issues merging the data
    #frame with others    
    df.index.tz=None
    return df
nc_to_gpx.py (project: faampy, author: ncasuk)
def nc_to_gpx(ncfile, outpath):
    ds = netCDF4.Dataset(ncfile, 'r')
    lon = ds.variables['LON_GIN'][:, 0]
    lat = ds.variables['LAT_GIN'][:, 0]
    alt = ds.variables['ALT_GIN'][:, 0]
    t = num2date(get_mpl_time(ds))

    outfilename = '%s_%s.gpx' % (get_fid(ds), get_base_time(ds).strftime('%Y%m%d'))
    outfile = os.path.join(outpath, outfilename)
    outtxt = lonlatalt_to_gpx(lon, lat, alt, t)
    f = open(outfile, 'w')
    f.write(outtxt)
    ds.close()
    f.close()
cp.py (project: faampy, author: ncasuk)
def read_core_cloud(ifile):
    """reads in the core cloud data and 

    :param str ifile: core cloud netcdf file
    :return: pandas.Dataframe
    :type return: pandas.Dataframe

    """
    ds = netCDF4.Dataset(ifile, 'r')
    vars = sorted(ds.variables.keys())
    vars.remove('Time')

    # create and indexed pandas DataFrame
    tindex = get_mpl_time(ds, 1)
    tindex = num2date(tindex.ravel())
    # initialize an empty Dataframe
    df = pd.DataFrame(index=tindex)
    for v in vars:
        shp = ds.variables[v].shape
        if not shp[0] == len(tindex):
            continue
        if len(shp) == 2:
            data = np.copy(ds.variables[v][:,0].data).ravel()
        else:
            data = np.copy(ds.variables[v][:])
        df_tmp = pd.DataFrame(data[:].ravel(), index=tindex, columns=[v,])
        df = pd.concat([df, df_tmp], axis=1)

    df[df == -9999.0] = np.nan  # set all missing values to nan
    t = df.index.values
    df['timestamp'] = t.astype('datetime64[s]')  # Converting index data type

    # TODO: Check that this is really necessary
    # set timezone to None otherwise there might be issues merging
    # this DataFrame with others    
    df.index.tz = None
    return df
spanviews.py (project: inspector, author: WattyAB)
def from_xaxis(self, value):
        if self.axis_has_datelocator(self.axes.xaxis):
            return num2date(value).replace(tzinfo=None)
        else:
            return value
transforms.py (project: mizani, author: has2k1)
def inverse(x):
        """
        Transform to date from numerical format
        """
        return num2date(x)
Model.py (project: MOSPAT, author: CR2MOS)
def time_crop(f_init_date, f_final_date, delta, f_time_array, data_array, multiple=False):
    """
    Crop the data_array between f_init_date and f_final_date.

    :param f_init_date: Float. Initial date
    :param f_final_date: Float. Final date
    :param delta: a datetime instance to step in dates
    :param f_time_array: Float array. All dates of data_array
    :param data_array: The data to be cropped. Its shape must be of the form (time, ...)
    :param multiple: False, just one data_array. True a list of data_arrays.
    :return: Cropped data and according datetime list.
    """
    i_start = np.where(np.array(f_time_array) >= f_init_date)[0][0]
    i_end = np.where(np.array(f_time_array) <= f_final_date + 23 / 24.)[0][-1]

    # TODO: Refactor this while. You can transform delta and operate only on f_dates and then convert the entire list.
    d_date = num2date(f_init_date).replace(minute=0)
    d_Time = []
    f_Time = []
    while f_init_date <= f_final_date + 23 / 24.:
        d_Time.append(d_date)
        f_Time.append(date2num(d_date))
        d_date = d_date + delta
        f_init_date = date2num(d_date)

    if multiple:
        all_cropped_data = []
        for data in data_array:
            new_shape = [len(d_Time)]
            new_shape.extend(list(data.shape[1:]))
            new_shape = tuple(new_shape)
            cropped_data = np.empty(new_shape)
            cropped_data.fill(np.nan)
            # TODO: Use find_nearest
            cropped_data[np.in1d(f_Time, f_time_array[i_start:i_end + 1])] = data[i_start:i_end + 1]
            all_cropped_data.append(cropped_data)
        return all_cropped_data, d_Time
    else:
        new_shape = [len(d_Time)]
        new_shape.extend(list(data_array.shape[1:]))
        new_shape = tuple(new_shape)
        cropped_data = np.empty(new_shape)
        cropped_data.fill(np.nan)
        cropped_data[np.in1d(f_Time, f_time_array[i_start:i_end + 1])] = data_array[i_start:i_end + 1]
        return cropped_data, d_Time
time.py (project: heliopy, author: heliopython)
def randomDate(dt1, dt2, N=1, tzinfo=False, sorted=False):
    """
    Return one (or many) random datetimes between two given dates, under the convention dt1 <= rand < dt2

    Parameters
    ==========
    dt1 : datetime.datetime
        start date for the random date
    dt2 : datetime.datetime
        stop date for the random date

    Other Parameters
    ================
    N : int (optional)
        the number of random dates to generate (default=1)
    tzinfo : bool (optional)
        maintain the tzinfo of the input datetimes (default=False)
    sorted : bool (optional)
        return the times sorted (default=False)

    Returns
    =======
    out : datetime.datetime or numpy.ndarray of datetime.datetime
        the randomly generated datetime(s)

    Examples
    ========
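    A minimal illustrative call (assumed usage; the generated dates vary from
    run to run):

    >>> import datetime as dt
    >>> dates = randomDate(dt.datetime(2000, 1, 1), dt.datetime(2000, 2, 1), N=3, sorted=True)
    >>> len(dates)
    3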
    """
    from matplotlib.dates import date2num, num2date

    if dt1.tzinfo != dt2.tzinfo:
        raise(ValueError('tzinfo for the input and output datetimes must match'))
    dt1n = date2num(dt1)
    dt2n = date2num(dt2)
    rnd_tn = np.random.uniform(dt1n, dt2n, size=N)
    rnd_t = num2date(rnd_tn)
    if not tzinfo:
        tzinfo = None
    else:
        tzinfo = dt1.tzinfo
    rnd_t = np.asarray([val.replace(tzinfo=tzinfo) for val in rnd_t])
    if sorted:
        rnd_t.sort()
    return rnd_t
__init__.py (project: heliopy, author: heliopython)
def logspace(min, max, num, **kwargs):
    """
    Returns log-spaced bins. Same as numpy.logspace, except that min and max
    are the actual bounds rather than log10(min) and log10(max).

    Parameters
    ==========
    min : float
        minimum value
    max : float
        maximum value
    num : integer
        number of log spaced bins

    Other Parameters
    ================
    kwargs : dict
        additional keywords passed through to numpy.logspace

    Returns
    =======
    out : array
        log-spaced bins from min to max in a numpy array

    Notes
    =====
    This function works on both numbers and datetime objects

    Examples
    ========
    >>> import spacepy.toolbox as tb
    >>> tb.logspace(1, 100, 5)
    array([   1.        ,    3.16227766,   10.        ,   31.6227766 ,  100.        ])

    See Also
    ========
    geomspace
    linspace
    """
    if isinstance(min, datetime.datetime):
        from matplotlib.dates import date2num, num2date
        ans = num2date(np.logspace(np.log10(date2num(min)), np.log10(date2num(max)), num, **kwargs))
        ans = spt.no_tzinfo(ans)
        return np.array(ans)
    else:
        return np.logspace(np.log10(min), np.log10(max), num, **kwargs)
__init__.py (project: heliopy, author: heliopython)
def linspace(min, max, num, **kwargs):
    """
    Returns linearly spaced bins. Same as numpy.linspace, except that it works
    with datetimes and is faster.

    Parameters
    ==========
    min : float, datetime
        minimum value
    max : float, datetime
        maximum value
    num : integer
        number of linear spaced bins

    Other Parameters
    ================
    kwargs : dict
        additional keywords passed through to numpy.linspace

    Returns
    =======
    out : array
        linear-spaced bins from min to max in a numpy array

    Notes
    =====
    This function works on both numbers and datetime objects

    Examples
    ========
    >>> import spacepy.toolbox as tb
    >>> tb.linspace(1, 10, 4)
    array([  1.,   4.,   7.,  10.])

    See Also
    ========
    geomspace
    logspace
    """
    if hasattr(min, 'shape') and min.shape == ():
        min = min.item()
    if hasattr(max, 'shape') and max.shape == ():
        max = max.item()
    if isinstance(min, datetime.datetime):
        from matplotlib.dates import date2num, num2date
        ans = num2date(np.linspace(date2num(min), date2num(max), num, **kwargs))
        ans = spt.no_tzinfo(ans)
        return np.array(ans)
    else:
        return np.linspace(min, max, num, **kwargs)
forecaster.py (project: prophet, author: facebook)
def plot_seasonality(self, name, ax=None, uncertainty=True):
        """Plot a custom seasonal component.

        Parameters
        ----------
        name: Name of the seasonality component to plot.
        ax: Optional matplotlib Axes to plot on. One will be created if
            this is not provided.
        uncertainty: Optional boolean to plot uncertainty intervals.

        Returns
        -------
        a list of matplotlib artists
        """
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        # Compute seasonality from Jan 1 through a single period.
        start = pd.to_datetime('2017-01-01 0000')
        period = self.seasonalities[name]['period']
        end = start + pd.Timedelta(days=period)
        plot_points = 200
        days = pd.to_datetime(np.linspace(start.value, end.value, plot_points))
        df_y = self.seasonality_plot_df(days)
        seas = self.predict_seasonal_components(df_y)
        artists += ax.plot(df_y['ds'].dt.to_pydatetime(), seas[name], ls='-',
                            c='#0072B2')
        if uncertainty:
            artists += [ax.fill_between(
                df_y['ds'].dt.to_pydatetime(), seas[name + '_lower'],
                seas[name + '_upper'], color='#0072B2', alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        xticks = pd.to_datetime(np.linspace(start.value, end.value, 7)
            ).to_pydatetime()
        ax.set_xticks(xticks)
        if period <= 2:
            fmt_str = '{dt:%T}'
        elif period < 14:
            fmt_str = '{dt:%m}/{dt:%d} {dt:%R}'
        else:
            fmt_str = '{dt:%m}/{dt:%d}'
        ax.xaxis.set_major_formatter(FuncFormatter(
            lambda x, pos=None: fmt_str.format(dt=num2date(x))))
        ax.set_xlabel('ds')
        ax.set_ylabel(name)
        return artists
nc_to_csv.py (project: faampy, author: ncasuk)
def process(ds, varnames, outfile=None, include_flag=False):
    """                                                                                                                                                      
    :param ds: netCDF4.Dataset object                                                                                                                        
    :param varnamess: list of variables ot                                                                                                                        
    :param outfile: csv filename output
    :param include_flag: adding flags

    """

    if include_flag:
        new_varnames = []
        for p in varnames:
            new_varnames.append(p)
            new_varnames.append(p+'_FLAG')
        varnames = new_varnames

    n = ds.variables['Time'][:].size
    dt = [i[0].strftime('%Y-%m-%d %H:%M:%S') for i in num2date(get_mpl_time(ds, 1))]

    header =','.join(['utc',] + varnames)

    _dtype = [('utc', 'S20'), ]
    _fmt = ['%s', ]
    for v in varnames:
        if v.endswith('_FLAG'):
            _dtype.append((v, int))
            _fmt.append('%i')
        else:
            _dtype.append((v, float))
            _fmt.append('%.3f')

    result = np.recarray((n,), dtype=_dtype)

    for v in result.dtype.fields.keys():
        if v.endswith('_FLAG'):
            result[v] = 3
        else:
            result[v] = -9999.
    result['utc'] = dt

    for v in varnames:
        if v.lower() in [i.lower() for i in ds.variables.keys()]:
            if len(ds.variables[v][:].shape) == 2:
                result[v] = ds.variables[v][:, 0]
            else:
                result[v] = ds.variables[v][:]
            if v.endswith('_FLAG'):
                result[v][result[v] < 0] = 3

    if outfile:
        lines = []
        for r in result:
            lines.append(','.join(_fmt[:]) % tuple(list(r)[:]))       
        out = open(outfile, 'w')
        out.write(header + '\n' + '\n'.join(lines) + '\n')
        out.close()
    return (result, header)
co.py (project: faampy, author: ncasuk)
def get_cal_coefficients(data):
    """The calibration coefficients for the AL5002 instrument drift
    inbetween calibrations. It is assumed that the drifting is linear
    and too take account of this new coefficients are calculated for
    each data point, which are then used to recalculate the CO concentrations.

    """

    sens = data['AL52CO_sens'].ravel()
    zero = data['AL52CO_zero'].ravel()
    utc_time = [time.mktime(num2date(i).timetuple()) for \
                i in data['mpl_timestamp'][:,0]]
    # create copies of sens and zero calibration coefficients
    sens_new, zero_new = sens[:], zero[:]
    # get calibration periods
    ix=np.where(sens[1:]-sens[:-1] != 0)[0]
    # remove nan values
    ix=ix[~np.isnan((sens[1:]-sens[:-1])[ix])]
    # ignore the first 100 data points
    ix=ix[ix>100]
    # the +2 is a dodgy way to make sure that the values have changed.
    # Apparently the zero and sens parameters do not change at
    # exactly the same time in the data stream
    ix=[10]+list(ix+2)+[sens.size-2]
    # loop over all calibration periods
    table=[]
    for i in range(len(ix)-1):
        ix1=ix[i]
        ix2=ix[i+1]
        sens_new[ix1:ix2]=np.interp(utc_time[ix1:ix2], np.float32([utc_time[ix1], utc_time[ix2]]), [sens[ix1], sens[ix2]])
        zero_new[ix1:ix2]=np.interp(utc_time[ix1:ix2], np.float32([utc_time[ix1], utc_time[ix2]]), [zero[ix1], zero[ix2]])
        # write calibration information to stdout
        timestamp_start=datetime.datetime.utcfromtimestamp(utc_time[ix1]).strftime('%Y-%m-%d %H:%M:%S')
        timestamp_end=datetime.datetime.utcfromtimestamp(utc_time[ix2]).strftime('%Y-%m-%d %H:%M:%S')
        if np.isnan(sens[ix1]):
            sens_string='   nan'
        else:
            sens_string='%6.2f' % (sens[ix1],)
        if np.isnan(zero[ix1]):
            zero_string='   nan'
        else:
            zero_string='%6i' % (zero[ix1],)
        table.append([timestamp_start, timestamp_end, sens_string, zero_string])

    return table

