def from_frontend_value(key, value):
    """Return the `SiteConfiguration` value for the given `key` and
    JSON-serializable `value`, applying the inverse of any transformation
    performed by to_frontend_value."""
    if key == NICETIES_OPEN:
        from datetime import timedelta
        return timedelta(days=value)
    elif key == CLOSING_TIME:
        from datetime import datetime
        return datetime.strptime(value, '%H:%M').time()
    elif key == CLOSING_BUFFER:
        from datetime import timedelta
        return timedelta(minutes=value)
    elif key == CACHE_TIMEOUT:
        from datetime import timedelta
        return timedelta(seconds=value)
    elif key == INCLUDE_FACULTY:
        return value
    elif key == INCLUDE_RESIDENTS:
        return value
    else:
        raise ValueError('No such config key!')
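A minimal round-trip sketch (my own illustration, not from the original project; it assumes the config keys such as NICETIES_OPEN, CLOSING_TIME and CACHE_TIMEOUT are module-level string constants, as the branches above imply):

from datetime import time, timedelta

# Hypothetical frontend values mapped back to their Python objects.
assert from_frontend_value(CLOSING_TIME, '18:30') == time(18, 30)
assert from_frontend_value(NICETIES_OPEN, 2) == timedelta(days=2)
assert from_frontend_value(CACHE_TIMEOUT, 300) == timedelta(seconds=300)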
Python strptime() usage examples (collected source snippets)
def test(self):
    cx = self.Symbol_Db['equity'].find()
    symbolSet = set([d['code'] for d in cx])
    for code in symbolSet:
        start = self.Symbol_Db['equity'].find({"code": code})[0]['timeToMarket']
        try:
            start = datetime.datetime.strptime(str(start), '%Y%m%d')
        except:
            # unparsable listing date: report the symbol and skip it
            print code
            continue
        start = start.strftime("%Y-%m-%d")
        print start
    return
#----------------------------------------------------------------------
Source file: test_timeseries.py (project: PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda, author: SignalMedia)
def test_NaT_methods(self):
    # GH 9513
    raise_methods = ['astimezone', 'combine', 'ctime', 'dst',
                     'fromordinal', 'fromtimestamp', 'isocalendar',
                     'strftime', 'strptime', 'time', 'timestamp',
                     'timetuple', 'timetz', 'toordinal', 'tzname',
                     'utcfromtimestamp', 'utcnow', 'utcoffset',
                     'utctimetuple']
    nat_methods = ['date', 'now', 'replace', 'to_datetime', 'today']
    nan_methods = ['weekday', 'isoweekday']

    for method in raise_methods:
        if hasattr(NaT, method):
            self.assertRaises(ValueError, getattr(NaT, method))

    for method in nan_methods:
        if hasattr(NaT, method):
            self.assertTrue(np.isnan(getattr(NaT, method)()))

    for method in nat_methods:
        if hasattr(NaT, method):
            self.assertIs(getattr(NaT, method)(), NaT)

    # GH 12300
    self.assertEqual(NaT.isoformat(), 'NaT')
def testTimestampValue(self):
    """Checks whether the timestamp attribute in the XML output is valid.

    Runs a test program that generates an empty XML output, and checks if
    the timestamp attribute in the testsuites tag is valid.
    """
    actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
    date_time_str = actual.documentElement.getAttributeNode('timestamp').value
    # datetime.strptime() is only available in Python 2.5+ so we have to
    # parse the expected datetime manually.
    match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
    date_time_from_xml = datetime.datetime(
        year=int(match.group(1)), month=int(match.group(2)),
        day=int(match.group(3)), hour=int(match.group(4)),
        minute=int(match.group(5)), second=int(match.group(6)))
    time_delta = abs(datetime.datetime.now() - date_time_from_xml)
    # timestamp value should be near the current local time
    self.assertTrue(time_delta < datetime.timedelta(seconds=600),
                    'time_delta is %s' % time_delta)
    actual.unlink()
def test_rapideye_sensor(self):
    '''
    Test an instance of the rapideye sensor class to check if it parses its
    attributes correctly.
    '''
    import madmex.mapper.sensor.rapideye as rapideye
    import datetime
    path = '/LUSTRE/MADMEX/eodata/rapideye/1447720/2013/2013-02-11/l3a/1447720_2013-02-11_RE3_3A_182802_metadata.xml'
    sensor = rapideye.Sensor(path)
    self.assertEqual(sensor.get_attribute(rapideye.ANGLE), 3.96)
    self.assertEqual(sensor.get_attribute(rapideye.PRODUCT_NAME), 'L3A')
    self.assertEqual(sensor.get_attribute(rapideye.SENSOR), 'OPTICAL')
    self.assertEqual(sensor.get_attribute(rapideye.PLATFORM), 'RE-3')
    self.assertEqual(sensor.get_attribute(rapideye.CREATION_DATE), '2013-04-26T17:48:34Z')
    self.assertEqual(sensor.get_attribute(rapideye.ACQUISITION_DATE), datetime.datetime.strptime('2013-02-11T18:04:21.337522Z', "%Y-%m-%dT%H:%M:%S.%fZ"))
    self.assertEqual(sensor.get_attribute(rapideye.CLOUDS), 0.0)
    self.assertEqual(sensor.get_attribute(rapideye.AZIMUTH_ANGLE), 278.21)
    self.assertEqual(sensor.get_attribute(rapideye.SOLAR_AZIMUTH), 162.0359)
    self.assertEqual(sensor.get_attribute(rapideye.SOLAR_ZENITH), 56.02738)
    self.assertEqual(sensor.get_attribute(rapideye.TILE_ID), 1447720)
def str_to_datetime_processor_factory(regexp, type_):
    rmatch = regexp.match
    # Even on python2.6 datetime.strptime is both slower than this code
    # and it does not support microseconds.
    has_named_groups = bool(regexp.groupindex)

    def process(value):
        if value is None:
            return None
        else:
            try:
                m = rmatch(value)
            except TypeError:
                raise ValueError("Couldn't parse %s string '%r' "
                                 "- value is not a string." %
                                 (type_.__name__, value))
            if m is None:
                raise ValueError("Couldn't parse %s string: "
                                 "'%s'" % (type_.__name__, value))
            if has_named_groups:
                groups = m.groupdict(0)
                return type_(**dict(list(zip(
                    iter(groups.keys()),
                    list(map(int, iter(groups.values())))
                ))))
            else:
                return type_(*list(map(int, m.groups(0))))
    return process
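A short usage sketch for the factory above (my own illustration): with an unnamed-group regexp, the matched groups are converted to ints and passed positionally to the target type, here datetime.date.

import re
import datetime

date_regexp = re.compile(r"(\d+)-(\d+)-(\d+)")
process_date = str_to_datetime_processor_factory(date_regexp, datetime.date)

assert process_date("2018-06-04") == datetime.date(2018, 6, 4)
assert process_date(None) is None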
def datetime_to_ms(dt):
    """
    Convert an unaware datetime object to milliseconds. This will
    be a UTC time. The SMC stores all times in UTC and will do the
    time conversions based on the local timezone.

    Example of converting a datetime to milliseconds::

        utc_time = datetime.strptime("2018-06-04T00:00:00", "%Y-%m-%dT%H:%M:%S")
        datetime_to_ms(utc_time)

    :param datetime dt: the python datetime object to convert
    :return: value representing the datetime in milliseconds
    :rtype: int
    """
    return int(time.mktime(dt.timetuple()) * 1000)
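A quick check of the conversion (my own sketch, assuming the same imports as the docstring). Note that time.mktime interprets the naive datetime in the local timezone, so converting back goes through datetime.fromtimestamp:

import time
from datetime import datetime

dt = datetime.strptime("2018-06-04T00:00:00", "%Y-%m-%dT%H:%M:%S")
ms = datetime_to_ms(dt)

# fromtimestamp() also uses the local timezone, so the original naive
# datetime is recovered (there are no sub-second digits to lose here).
assert datetime.fromtimestamp(ms / 1000.0) == dt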
def verifyForcedDates(fdates):
    from datetime import datetime
    import re
    force_dates = []
    for fdate in fdates:
        if (re.match(r"\d+-\d+-\d+", fdate) and
                datetime.strptime(fdate, '%Y-%m-%d').strftime('%Y-%m-%d') == fdate):
            force_dates.append(fdate)
    return force_dates
def dayStringAdd(anaday, days=0, weeks=0):
    from datetime import datetime, timedelta
    return (datetime.strptime(anaday, '%Y-%m-%d') + timedelta(days=days, weeks=weeks)).strftime('%Y-%m-%d')

def dayStringBeforeDelta(anaday, tdelta):
    from datetime import datetime
    return (datetime.strptime(anaday, '%Y-%m-%d') - tdelta).strftime('%Y-%m-%d')
def date(argument):
    formats = ('%Y/%m/%d', '%Y-%m-%d')
    for fmt in formats:
        try:
            return datetime.strptime(argument, fmt)
        except ValueError:
            continue
    raise commands.BadArgument('Cannot convert to date. Expected YYYY/MM/DD or YYYY-MM-DD.')
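A standalone sketch of the happy path above (my own illustration with hypothetical inputs; the commands.BadArgument branch belongs to discord.py and is not exercised here):

from datetime import datetime

# Both separators are accepted; the first matching format wins.
assert date('2021/05/03') == datetime(2021, 5, 3)
assert date('2021-05-03') == datetime(2021, 5, 3)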
def get_range_min_tick_data(self, code, start=None, end=None, ktype=1):
    start = str(fc.get_stock_timeToMarket(code)) if start is None else start
    end = str(datetime.datetime.today().date()) if end is None else end
    startD = datetime.datetime.strptime(start, '%Y-%m-%d')
    endD = datetime.datetime.strptime(end, '%Y-%m-%d')
    delta = datetime.timedelta(days=1)
    inDate = endD - delta
    while inDate >= startD:
        self.parse(code, inDate.strftime("%Y-%m-%d"), ktype)
        inDate -= delta
def downloadEquityAllData(self, code):
    start = self.Symbol_Db['equity'].find({"code": code})[0]['timeToMarket']
    try:
        start = datetime.datetime.strptime(str(start), '%Y%m%d')
    except:
        return
    start = start.strftime("%Y-%m-%d")
    self.get_range_daily_data(code, start)      # default range: from listing date to today
    self.get_range_min_tick_data(code, start)
    # build a descending index on 'date' to speed up queries
    self.Tick_Db[code].ensure_index([('date', pymongo.DESCENDING)])
    self.OneMin_Db[code].ensure_index([('date', pymongo.DESCENDING)])
def updateEquityAllData(self, code):
    # find the latest timestamp in the collection
    latest = self.Daily_Db[code].find_one(sort=[('date', pymongo.DESCENDING)])['date']
    latest = datetime.datetime.strptime(str(latest), '%Y-%m-%d')
    start = datetime.datetime.strftime(latest + timedelta(days=1), '%Y-%m-%d')
    self.get_range_daily_data(code, start)      # default range: from the day after the latest record to today
    self.get_range_min_tick_data(code, start)
def loadMcCsv(self, fileName, dbName, symbol):
    """Insert historical bar data exported from MultiCharts (CSV format) into the MongoDB database."""
    import csv

    start = time()
    print u'Loading CSV file %s into %s.%s' % (fileName, dbName, symbol)

    # connect to MongoDB, get the target collection and create a unique index on datetime
    host, port = loadMongoSetting()
    client = pymongo.MongoClient(host, port)
    collection = client[dbName][symbol]
    collection.ensure_index([('datetime', pymongo.ASCENDING)], unique=True)

    # read the CSV rows and upsert them into the collection
    reader = csv.DictReader(file(fileName, 'r'))
    for d in reader:
        bar = CtaBarData()
        bar.vtSymbol = symbol
        bar.symbol = symbol
        bar.open = float(d['Open'])
        bar.high = float(d['High'])
        bar.low = float(d['Low'])
        bar.close = float(d['Close'])
        bar.date = datetime.strptime(d['Date'], '%Y/%m/%d').strftime('%Y%m%d')
        bar.time = d['Time']
        bar.datetime = datetime.strptime(bar.date + ' ' + bar.time, '%Y%m%d %H:%M:%S')
        bar.volume = d['TotalVolume']
        flt = {'datetime': bar.datetime}
        collection.update_one(flt, {'$set': bar.__dict__}, upsert=True)
        print bar.date, bar.time

    print u'Data insert finished, time elapsed: %s' % (time() - start)
def GetTabularDate(newFolder):
    # Get string for SAVEREST date from tabular/sacatlog.txt file
    # Use it to compare with the date from the WSS dataset
    # If the existing database is same or newer, it will be kept and the WSS version skipped
    # The original string looks like this: 12/05/2013 23:44:00
    #
    # Return YYYYMMDD as integer

    try:
        tabDate = 0

        # Try finding the text file in the tabular folder and reading SAVEREST from that file.
        saCatalog = os.path.join(newFolder, r"tabular\sacatlog.txt")

        if arcpy.Exists(saCatalog):
            fh = open(saCatalog, "r")
            rec = fh.readline()
            fh.close()
            # Example date (which is index 3 in pipe-delimited file): 9/23/2014 6:49:27
            vals = rec.split("|")
            recDate = vals[3]
            wssDate = "%m/%d/%Y %H:%M:%S"  # string date format used for SAVEREST in text file
            intDate = "%Y%m%d"             # YYYYMMDD format for comparison
            dateObj = datetime.strptime(recDate, wssDate)
            tabDate = int(dateObj.strftime(intDate))
        else:
            AddMsgAndPrint(" \nUnable to find file: " + saCatalog, 1)

        return tabDate

    except:
        errorMsg()
        return tabDate
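A minimal sketch of the SAVEREST conversion described in the comments above, using the sample date from the comment (the surrounding record fields are illustrative):

from datetime import datetime

rec = "area symbol|name|version|12/05/2013 23:44:00|other fields"  # illustrative pipe-delimited record
recDate = rec.split("|")[3]
tabDate = int(datetime.strptime(recDate, "%m/%d/%Y %H:%M:%S").strftime("%Y%m%d"))
assert tabDate == 20131205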
## ===================================================================================
def plot_by_all(data):
    x_vals = list()
    y_vals = list()
    for k in sorted(data):
        x_vals.append(datetime.datetime.strptime(k, key_pattern))
        y_vals.append(data[k])
    x_vals2 = matplotlib.dates.date2num(x_vals)
    plt.clf()
    #plt.plot_date(x_vals2, y_vals)
    plt.plot(x_vals2, y_vals)
    plt.show()
def plot_by_interval(data, zamg_dfs=None):
    p_vals = {}
    # sort by year
    for k in sorted(data):
        v = data[k]
        d_obj = datetime.datetime.strptime(k, key_pattern)
        series_key = datetime.datetime.strftime(d_obj, "%Y")
        #data_key = datetime.datetime.strftime(d_obj, "%m-%d")
        p_vals.setdefault(series_key, dict()).setdefault("x_vals", list()).append(d_obj)
        p_vals.setdefault(series_key, dict()).setdefault("y_vals", list()).append(v)
    plt.clf()
    fig, axis = plt.subplots(nrows=len(p_vals) * 2, sharex=False, sharey=False)
    a_iter = iter(axis)
    for k in sorted(p_vals):
        v = p_vals[k]
        ax = next(a_iter)
        y_vals = v["y_vals"]
        x_vals = matplotlib.dates.date2num(v["x_vals"])
        ax.plot_date(x_vals, y_vals)
        # ax.plot(x_vals, y_vals)
        ax = next(a_iter)
        if zamg_dfs is not None and k in zamg_dfs:
            df = zamg_dfs[k]
            df['Wien Hohe Warte']['48,2486']['16,3564']['198.0']['Anhöhe']['Ebene']\
                ['Lufttemperatur']['Lufttemperatur um 14 MEZ (°C)'].plot(ax=ax)
    plt.show()
def reformat_dateteime(in_date, in_pattern, out_pattern):
    d = datetime.datetime.strptime(in_date, in_pattern)
    return datetime.datetime.strftime(d, out_pattern)
def check_product(product):
    """
    Checks a product for currency.

    :param product: Product dictionary
    :type product: dict
    """
    global PROD_OK, PROD_WARN, PROD_CRIT

    # check whether the product is unsynced
    if product["last_sync"] is None:
        LOGGER.debug("Product '{0}' ({1}) is UNSYNCED!".format(
            product["label"], product["description"]
        ))
        PROD_CRIT.append(product["label"])
        set_code(2)
    else:
        LOGGER.debug("Product '{0}' ({1}) was synced at {2}".format(
            product["label"], product["description"], product["last_sync"][0:19]
        ))
        last_sync = datetime.strptime(
            product["last_sync"][0:19], "%Y-%m-%d %H:%M:%S"
        )
        delta = datetime.now() - last_sync
        LOGGER.debug("Delta for '{0}' is {1} days".format(
            product["label"], delta.days
        ))
        if delta.days > options.outdated_crit:
            PROD_CRIT.append(product["label"])
            set_code(2)
            LOGGER.debug("Critical product: '{0}'".format(product["label"]))
        if delta.days > options.outdated_warn:
            PROD_WARN.append(product["label"])
            set_code(1)
            LOGGER.debug("Warning product: '{0}'".format(product["label"]))
        else:
            PROD_OK.append(product["label"])
            LOGGER.debug("Ok product: '{0}'".format(product["label"]))
def valid_date(input_date):
    """Validate input dates against a certain format.

    :param str input_date: Date value to check
    :return: Loaded date value
    """
    try:
        return datetime.strptime(input_date, "%Y-%m-%d")
    except ValueError:
        msg = "Not a valid date: '{0}'.".format(input_date)
        raise argparse.ArgumentTypeError(msg)
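A small sketch of how a validator like this is typically wired into argparse (my own example; the parser and flag name are hypothetical):

import argparse
from datetime import datetime

parser = argparse.ArgumentParser()
# argparse calls valid_date on the raw string; ArgumentTypeError becomes a usage error.
parser.add_argument("--since", type=valid_date, help="start date, YYYY-MM-DD")

args = parser.parse_args(["--since", "2021-05-03"])
assert args.since == datetime(2021, 5, 3)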
Source file: test_start_late_homework_student.py (project: TestRewrite, author: osqa-interns)
def get_new_set_time(seconds_added=60):
    """
    Helper function used to create an assignment that will be due very
    soon (i.e. a late assignment). It handles the fiddly formatting for you;
    the new time is computed from the local time using datetime.datetime.

    :param seconds_added: number of seconds from now that you want the
        due date of the assignment to be
    :return: string of format HHMMam/pm which you can enter directly into
        the due-time textbox in order to set the due time
    """
    # get current time and the time at which to make the assignment due
    time_now = datetime.now()
    time_now = time_now + timedelta(seconds=seconds_added)
    # zero-pad hours and minutes so strptime parses "%H%M" unambiguously
    time_string = time_now.strftime("%H%M")
    time_structure = datetime.strptime(time_string, "%H%M")
    # time_structure is a datetime.datetime object
    time_string_reform = time_structure.strftime("%I%M%p")
    # ^ the returned time comes in format "HHMM am/pm"
    return time_string_reform
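For example (my own illustration; the output depends on the current local clock):

due_time = get_new_set_time(seconds_added=120)
print(due_time)  # e.g. '0432PM', two minutes from now, ready to paste into the due-time textbox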
################
def _originally_available_at(self):
    return datetime.strptime(self.data.get('originallyAvailableAt',
                                           '0000-00-00'),
                             '%Y-%m-%d').date()
def str_to_datetime_processor_factory(regexp, type_):
    rmatch = regexp.match
    # Even on python2.6 datetime.strptime is both slower than this code
    # and it does not support microseconds.
    has_named_groups = bool(regexp.groupindex)

    def process(value):
        if value is None:
            return None
        else:
            try:
                m = rmatch(value)
            except TypeError:
                raise ValueError("Couldn't parse %s string '%r' "
                                 "- value is not a string." %
                                 (type_.__name__, value))
            if m is None:
                raise ValueError("Couldn't parse %s string: "
                                 "'%s'" % (type_.__name__, value))
            if has_named_groups:
                groups = m.groupdict(0)
                return type_(**dict(zip(groups.iterkeys(),
                                        map(int, groups.itervalues()))))
            else:
                return type_(*map(int, m.groups(0)))
    return process