def get(self):
    """Serve the cached Oscars top-10 entities as JSON.

    Reads the cached payload from memcache (key MC_OSCARS_TOP10); on a
    cache miss or malformed payload, responds with an empty entity list.
    """
    self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    try:
        # memcache.get returns None on a miss; json.loads then raises
        # TypeError (or ValueError for corrupt JSON). Catch only those so
        # genuine programming errors are not silently swallowed.
        data = json.loads(memcache.get(MC_OSCARS_TOP10))
    except (TypeError, ValueError):
        self.response.write(json.dumps({'entities':[]}))
        return
    entities = data['entities']
    if not entities:
        self.response.write(json.dumps({'entities':[]}))
        return
    self.response.write(json.dumps({
        # entities looks like a list of (name, ...) tuples; keep the names.
        # NOTE(review): zip(*entities)[0] requires Python 2 (py3 zip is lazy).
        'entities': zip(*entities)[0],
        "time": datetime.utcfromtimestamp(data['timestamp']).isoformat()+'.000Z'
    }))
Python example programs using datetime.utcfromtimestamp()
def __init__(self, name, value=None, path='/',
             expires=None, max_age=None,
             domain=None, secure=None, httponly=None,
             options=None):
    """Build a cookie, filling unspecified attributes from *options*.

    max_age (seconds from now) takes precedence over an explicit
    expires datetime; domain/secure/httponly fall back to the
    HTTP_COOKIE_* entries of *options* when not given.
    """
    self.name = name
    self.value = value
    self.path = path
    # A relative max_age is converted into an absolute UTC expiry.
    self.expires = (expires if max_age is None
                    else datetime.utcfromtimestamp(time() + max_age))
    self.domain = options['HTTP_COOKIE_DOMAIN'] if domain is None else domain
    self.secure = options['HTTP_COOKIE_SECURE'] if secure is None else secure
    self.httponly = (options['HTTP_COOKIE_HTTPONLY']
                     if httponly is None else httponly)
def pokestop(data):
    """Convert a raw scanner payload into a pokestop event dict.

    Returns None for stops without an active lure.
    """
    log.debug("Converting to pokestop: \n {}".format(data))
    if data.get('lure_expiration') is None:
        log.debug("Un-lured pokestop... ignoring.")
        return None
    lat = float(data['latitude'])
    lng = float(data['longitude'])
    stop = {
        'type': "pokestop",
        'id': data['pokestop_id'],
        'expire_time': datetime.utcfromtimestamp(data['lure_expiration']),
        'lat': lat,
        'lng': lng,
        # 5-decimal strings are used downstream as coarse location keys.
        'lat_5': "{:.5f}".format(lat),
        'lng_5': "{:.5f}".format(lng),
        'gmaps': get_gmaps_link(lat, lng),
        'applemaps': get_applemaps_link(lat, lng),
    }
    return stop
Source file: test_history_data_driver.py
Project: kafka-spark-influx-csv-analysis (author: bwsw)
def __get_result_set_from_mock(mock, query):
    """Build a fake InfluxDB result wrapper from the mock's stored points.

    Filters mock.points by the (start, end) 19-digit nanosecond range
    embedded in *query* and, when present, by a "country"='...' predicate,
    then exposes the rows via result.get_points().
    """
    result_wrapper = Mock()
    start, end = re.search(r"(\d{19}).*?(\d{19})", query).groups()
    lo, hi = int(start), int(end)
    # extract range (exclusive on both ends, as in the original query)
    selected = [p for p in mock.points if lo < p["time"] < hi]
    country_match = re.search(r"\"country\"=\'(\w+)\'", query)
    if country_match:
        wanted = country_match.groups()[0]
        selected = [p for p in selected if p["tags"]["country"] == wanted]
    influx_points = []
    for p in selected:
        row = {**p["fields"], **p.get("tags", {})}
        # Influx returns second-resolution ISO timestamps; ns // 1e9 -> s.
        row["time"] = datetime.utcfromtimestamp(
            p["time"] // 1000000000).strftime('%Y-%m-%dT%H:%M:%SZ')
        influx_points.append(row)
    result_wrapper.get_points.return_value = influx_points
    return result_wrapper
def _get_message_date(self, message):
    """Finds date and time information for `message` and converts it to ISO-8601 format and UTC timezone.
    """
    mail_date = message.get('Date', '').decode('utf-8')
    if not mail_date:
        # The get_from() result always (so far as observed) carries the
        # date string in its last 30 characters.
        mail_date = message.get_from().strip()[-30:]
    parsed = email.utils.parsedate_tz(mail_date)
    if not parsed:
        return ''
    unix_time = email.utils.mktime_tz(parsed)
    return datetime.utcfromtimestamp(unix_time).isoformat(' ')
def pprint(rate_limit):
    """
    Pretty print rate limit dictionary to be easily parsable and readable
    across multiple lines
    """
    # Ignoring the 'rate' key b/c github API claims this will be removed in
    # next major version:
    # https://developer.github.com/v3/rate_limit/#deprecation-notice
    def print_(name, limits):
        date_ = datetime.utcfromtimestamp(limits[name]['reset'])
        # Parenthesized single-argument print is valid on Python 2 and 3,
        # unlike the bare py2-only print statement used before.
        print('%8s remaining: %4s limit: %4s reset: %s' % (
            name,
            limits[name]['remaining'],
            limits[name]['limit'],
            date_.strftime('%d-%m-%Y %H:%M:%S')))
    print_('core', rate_limit['resources'])
    print_('search', rate_limit['resources'])
def parse_trade(cls, instmt, raw):
    """
    :param instmt: Instrument
    :param raw: Raw data in JSON
    :return:
    """
    # Raw layout: [trade id, epoch seconds, price, signed volume].
    trade_id, timestamp, trade_price, trade_volume = raw[0], raw[1], raw[2], raw[3]
    trade = Trade()
    trade.date_time = datetime.utcfromtimestamp(timestamp).strftime("%Y%m%d %H:%M:%S.%f")
    # A negative volume encodes a sell; the magnitude is the traded size.
    trade.trade_side = Trade.Side.BUY if trade_volume > 0 else Trade.Side.SELL
    trade.trade_volume = abs(trade_volume)
    trade.trade_id = str(trade_id)
    trade.trade_price = trade_price
    return trade
def parse_trade(cls, instmt, raw):
    """
    :param instmt: Instrument
    :param raw: Raw data in JSON
    :return:
    """
    base_amount = float(raw['base'])
    # Exchange reports the timestamp in milliseconds.
    ts_seconds = float(raw[cls.get_trades_timestamp_field_name()]) / 1000.0
    trade = Trade()
    trade.date_time = datetime.utcfromtimestamp(ts_seconds).strftime("%Y%m%d %H:%M:%S.%f")
    trade.trade_volume = base_amount
    trade.trade_id = raw['order_id']
    # Price is implied by the counter/base amounts of the order.
    trade.trade_price = float(raw['counter']) / base_amount
    return trade
def parse_trade(cls, instmt, raw):
    """
    :param instmt: Instrument
    :param raw: Raw data in JSON
    :return:
    """
    trade = Trade()
    # Raw layout: [price, volume, epoch seconds, side].
    trade.trade_price = float(str(raw[0]))
    trade.trade_volume = float(str(raw[1]))
    epoch_seconds = float(raw[2])
    trade.date_time = datetime.utcfromtimestamp(epoch_seconds).strftime("%Y%m%d %H:%M:%S.%f")
    trade.trade_side = Trade.parse_side(raw[3])
    # No native id from the feed: synthesize one from time + exchange counter.
    trade.trade_id = trade.date_time + '-' + str(instmt.get_exch_trade_id())
    return trade
def dump(self, *args, **kwargs):
    """Print every header field of this activity-file header, one per line."""
    print("file_header (0x%04x):" % (self.sa_magic,))
    # sa_ust_time also gets a human-readable UTC rendering.
    print("\tsa_ust_time", repr(self.sa_ust_time), datetime.utcfromtimestamp(self.sa_ust_time))
    # Plain fields are dumped uniformly via getattr to avoid repetition;
    # order matches the original field-by-field listing.
    for field in ("sa_actflag", "sa_nr_pid", "sa_irqcpu", "sa_nr_disk",
                  "sa_proc", "sa_serial", "sa_iface"):
        print("\t%s" % field, repr(getattr(self, field)))
    print("\tsa_magic 0x%04x" % self.sa_magic)
    for field in ("sa_st_size", "sa_day", "sa_month", "sa_year",
                  "sa_sizeof_long", "sa_sysname", "sa_nodename", "sa_release"):
        print("\t%s" % field, repr(getattr(self, field)))
def check_for_update():
    """Report our version to CORE_VERSION_URL at most once per UTC day.

    The mtime of FILE_UPDATE records the last check; the server's response
    is cached back into that file. Network failures are ignored.
    """
    if os.path.exists(FILE_UPDATE):
        mtime = os.path.getmtime(FILE_UPDATE)
        # Skip the network round-trip if we already checked today (UTC).
        if (datetime.utcfromtimestamp(mtime).strftime('%Y-%m-%d')
                == datetime.utcnow().strftime('%Y-%m-%d')):
            return
    try:
        # Touch the stamp file first so a failed request still counts.
        with open(FILE_UPDATE, 'a'):
            os.utime(FILE_UPDATE, None)
        request = urllib2.Request(
            CORE_VERSION_URL,
            urllib.urlencode({'version': main.__version__}),
        )
        response = urllib2.urlopen(request)
        with open(FILE_UPDATE, 'w') as update_json:
            update_json.write(response.read())
    except (urllib2.HTTPError, urllib2.URLError):
        pass
def make_new_entry(self, rel_path, id_handler):
    """
    Generates a new entry for the specified path.
    Note: This will mutate the id_handler!
    """
    # Allocate a fresh id up-front (this is the id_handler mutation).
    entry_id = id_handler.new_id()
    abs_path = os.path.join(read_from_config('media_loc').path, rel_path)
    # The file's mtime (as aware UTC) becomes the entry's last-modified stamp.
    modified_utc = datetime.utcfromtimestamp(
        os.path.getmtime(abs_path)).replace(tzinfo=timezone.utc)
    return oh.Entry(id=entry_id, path=rel_path,
                    date_added=datetime.now(timezone.utc),
                    last_modified=modified_utc,
                    type='Book', table=self.BOOK_TABLE_NAME, data_id=None,
                    hashseed=_rand.randint(0, 2**32))
def load(self, raw_value):
    """Converts an input raw value into a timestamp.

    Returns: Datetime object, if the conversion succeeds;
             None, if the conversion fails.

    raw_value -- The raw value, in string format (eg. '2014-12-20 15:01:02'),
                 or in milliseconds since Epoch (eg. 1293581619000)
    """
    if isinstance(raw_value, str):
        try:
            timestamp = datetime.strptime(raw_value, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            # Malformed date string. (Was a bare except that hid real bugs.)
            timestamp = None
    else:
        try:
            timestamp = datetime.utcfromtimestamp(float(raw_value) / 1000)
        except (TypeError, ValueError, OverflowError, OSError):
            # Not a number, or outside the representable timestamp range.
            timestamp = None
    return timestamp
def test_datetimefield(self):
    """DatetimeField round-trips datetimes through (py)json and rejects non-datetimes."""
    class A(Model):
        date = DatetimeField()
    epoch_seconds = 1478390400
    moment = datetime.utcfromtimestamp(epoch_seconds)
    a = A(date=moment)
    self.assertEqual(a.date, moment)
    # Assigning a non-datetime must be rejected by the field.
    with self.assertRaises(FieldTypeError):
        a.date = 'hello'
    # Serialization stores the raw epoch-seconds value.
    self.assertEqual(a.to_pyjson()['date'], epoch_seconds)
    round_tripped = A.from_json(a.to_json())
    self.assertEqual(round_tripped.date, moment)
def check_for_update():
    """Phone home with the current version, at most once per UTC day.

    FILE_UPDATE's mtime marks the last check; the server reply is written
    back into it. All HTTP/URL errors are deliberately swallowed.
    """
    if os.path.exists(FILE_UPDATE):
        last_check = datetime.utcfromtimestamp(
            os.path.getmtime(FILE_UPDATE)).strftime('%Y-%m-%d')
        if last_check == datetime.utcnow().strftime('%Y-%m-%d'):
            # Already checked today -- nothing to do.
            return
    try:
        # Update the stamp before the request so failures still rate-limit.
        with open(FILE_UPDATE, 'a'):
            os.utime(FILE_UPDATE, None)
        payload = urllib.urlencode({'version': main.__version__})
        response = urllib2.urlopen(urllib2.Request(CORE_VERSION_URL, payload))
        with open(FILE_UPDATE, 'w') as update_json:
            update_json.write(response.read())
    except (urllib2.HTTPError, urllib2.URLError):
        pass
def get_seconds_from_date(date):
    """
    The number of seconds from the epoch.

    Parameters
    ----------
    date: datetime
        A timezone-aware datetime.

    Returns
    -------
    int
    """
    # Stdlib timezone.utc replaces the former pytz.UTC dependency; the file
    # already uses timezone.utc elsewhere, and the two are equivalent here.
    epoch = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)
    return int((date - epoch).total_seconds())
def get_minutebar(filesource):
    """Read fixed-size C_PERIODIC_BAR records from *filesource* into a list of PeriodicBars."""
    output = []
    with open(filesource, 'rb') as file_:
        a = C_PERIODIC_BAR()
        # readinto() returns the number of bytes read; stop on a short or empty read.
        while (file_.readinto(a) == sizeof(C_PERIODIC_BAR)):
            open_ = Quote(a.open.bid_price, a.open.bid_size, a.open.ask_price, a.open.ask_size)
            close_ = Quote(a.close.bid_price, a.close.bid_size, a.close.ask_price, a.close.ask_size)
            # NOTE(review): under Python 2, `tv_usec / 1000000` is integer
            # division and always contributes 0 -- confirm the intended
            # Python version / whether sub-second precision was wanted.
            ts = datetime.utcfromtimestamp(a.ts.time.tv_sec + a.ts.time.tv_usec / 1000000)
            # Rebuilding field-by-field attaches UTC and drops any
            # microseconds -- presumably intentional truncation; verify.
            ts_with_tz = datetime(year=ts.year,
                                  month=ts.month,
                                  day=ts.day,
                                  hour=ts.hour,
                                  minute=ts.minute,
                                  second=ts.second,
                                  tzinfo=pytz.UTC)
            elem = PeriodicBar(open_, close_, a.high, a.low, a.volume, ts_with_tz)
            output.append(elem)
    return output
## @brief Make quote objects from futures data
# date,product,specific_ticker,open,high,low,close,contract_volume,contract_oi,total_volume,total_oi
#
def check_for_update():
    """Submit __version__ to CORE_VERSION_URL once per UTC day, caching the reply in FILE_UPDATE."""
    today = datetime.utcnow().strftime('%Y-%m-%d')
    if os.path.exists(FILE_UPDATE):
        stamp = datetime.utcfromtimestamp(os.path.getmtime(FILE_UPDATE))
        if stamp.strftime('%Y-%m-%d') == today:
            # Checked within the current UTC day; rate-limited out.
            return
    try:
        with open(FILE_UPDATE, 'a'):
            os.utime(FILE_UPDATE, None)
        req = urllib2.Request(
            CORE_VERSION_URL,
            urllib.urlencode({'version': __version__}),
        )
        body = urllib2.urlopen(req).read()
        with open(FILE_UPDATE, 'w') as update_json:
            update_json.write(body)
    except (urllib2.HTTPError, urllib2.URLError):
        pass
def from_soup(cls, tweet):
    """Build an instance from one parsed tweet element of the search-result HTML."""
    def action_count(action_class):
        # Each action span nests its count in a ProfileTweet-actionCount span.
        return tweet.find('span', action_class).find(
            'span', 'ProfileTweet-actionCount')['data-tweet-stat-count'] or '0'
    posted_at = datetime.utcfromtimestamp(
        int(tweet.find('span', '_timestamp')['data-time']))
    return cls(
        # Strip the leading '@' from the username span.
        user=tweet.find('span', 'username').text[1:],
        fullname=tweet.find('strong', 'fullname').text,
        id=tweet['data-item-id'],
        url=tweet.find('div', 'tweet')['data-permalink-path'],
        timestamp=posted_at,
        text=tweet.find('p', 'tweet-text').text or "",
        replies=action_count('ProfileTweet-action--reply u-hiddenVisually'),
        retweets=action_count('ProfileTweet-action--retweet u-hiddenVisually'),
        likes=action_count('ProfileTweet-action--favorite u-hiddenVisually'),
    )
def format_date(unix_timestamp):
""" Return a standardized date format for use in the two1 library.
This function produces a localized datetime string that includes the UTC timezone offset. This offset is
computed as the difference between the local version of the timestamp (python's datatime.fromtimestamp)
and the utc representation of the input timestamp.
Args:
unix_timestamp (float): a floating point unix timestamp
Returns:
string: A string formatted with "%Y-%m-%d %H:%M:%S %Z"
"""
local_datetime = datetime.fromtimestamp(unix_timestamp)
utz_offset = local_datetime - datetime.utcfromtimestamp(unix_timestamp)
local_date = local_datetime.replace(
tzinfo=timezone(utz_offset)
).strftime("%Y-%m-%d %H:%M:%S %Z")
return local_date
def listsnapshots(self, sortbycreation=False, sortreverse=False):
    """List snapshots of the NetApp volume with their space usage in bytes.

    Returns a list of dicts with keys: id, creation (naive UTC datetime),
    numclones (0/1), space_total, space_unique; optionally sorted by
    creation time when sortbycreation is True.
    """
    output = self._srv.invoke("volume-size", "volume", self._volname)
    self._check_netapp_error(output, "Failed to get volume size information")
    volsize = self._volsize_to_num(output.child_get_string("volume-size"))
    # Threshold (in percent-of-total-blocks) above which the absolute block
    # counters would overflow; beyond it we derive space from percentages.
    # NOTE(review): 2147483648 == 2**31, presumably the 32-bit counter
    # limit of the ONTAP API -- confirm against the API documentation.
    pct_limit = round(2147483648*100/(volsize/self._blocksize))
    output = self._srv.invoke("snapshot-list-info", "volume", self._volname)
    self._check_netapp_error(output, "Failed to list snapshots")
    snapshotlist = output.child_get("snapshots")
    snapshots = []
    if (snapshotlist is not None and snapshotlist):
        for ss in snapshotlist.children_get():
            snapshots.append( {'id': ss.child_get_string("name"),
                               'creation': datetime.utcfromtimestamp(float(ss.child_get_int("access-time"))),
                               # "busy" == "true" means at least one clone pins this snapshot.
                               'numclones': 1 if ss.child_get_string("busy") == "true" else 0,
                               # Use exact block counts when safe, else scale the volume size by the reported percentage.
                               'space_total': ss.child_get_int("cumulative-total")*self._blocksize if ss.child_get_int("cumulative-percentage-of-total-blocks") < pct_limit else round(volsize*ss.child_get_int("cumulative-percentage-of-total-blocks")/100),
                               'space_unique': ss.child_get_int("total")*self._blocksize if ss.child_get_int("percentage-of-total-blocks") < pct_limit else round(volsize*ss.child_get_int("percentage-of-total-blocks")/100)
                               } )
    if not sortbycreation:
        return snapshots
    else:
        return sorted(snapshots, key=operator.itemgetter('creation'), reverse=sortreverse)
def get_stamp(stampdir):
    """Return the backup timestamp recorded in *stampdir*/stamp as a naive UTC datetime.

    The first whitespace-separated field of the stamp file's first line is
    an epoch-seconds integer. Raises BackupError when the value is not a
    positive number.
    """
    stampfile = os.path.join(stampdir, 'stamp')
    # Context manager guarantees the file is closed even if readline raises
    # (the original leaked the handle on error).
    with open(stampfile, mode='rt') as f:
        line = f.readline()
    # extract timestamp (first field of line)
    stamp = int(line.split().pop(0))
    if not stamp > 0:
        raise BackupError("Can't read `{}'".format(stampfile))
    return datetime.utcfromtimestamp(stamp)
# find a list of backups for a particular machine
def unix_time(zulu_time_string):
    """Convert an ISO-8601 Zulu ('...Z') time string to epoch milliseconds."""
    parsed = datetime.strptime(zulu_time_string, "%Y-%m-%dT%H:%M:%SZ")
    elapsed = parsed - datetime.utcfromtimestamp(0)
    return int(elapsed.total_seconds() * 1000)
def unix_time(zulu_time_string):
    """Milliseconds since the Unix epoch for a '%Y-%m-%dT%H:%M:%SZ' string."""
    epoch_start = datetime.utcfromtimestamp(0)
    moment = datetime.strptime(zulu_time_string, "%Y-%m-%dT%H:%M:%SZ")
    return int((moment - epoch_start).total_seconds() * 1000)
def unix_time(zulu_time_string):
    """Translate a Zulu-suffixed UTC timestamp string into integer epoch milliseconds."""
    delta = (datetime.strptime(zulu_time_string, "%Y-%m-%dT%H:%M:%SZ")
             - datetime.utcfromtimestamp(0))
    return int(delta.total_seconds() * 1000)
def timestamp_to_datetime(timestamp): # UTC datetime
    """Return the naive UTC datetime corresponding to an epoch-seconds timestamp."""
    return datetime.utcfromtimestamp(timestamp)
def localize_utc_timestamp(utc_datetime):
''' Convert timestamp in UTC to local timezone. '''
now = time.time()
offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
return utc_datetime + offset
def _create_list_zerofill(self, data, start,stop):
    """Yield {time, count} dicts for each datapoint in *data*.

    NOTE(review): the gap-filling ("zerofill") logic is entirely commented
    out; `next`, `prev`, `prev_t`, `now` and the recomputed `stop` only
    feed that disabled code. As written, the generator simply emits each
    datapoint's own timestamp and frequency.
    """
    # Align the window start to 10-second buckets (+10s lead-in).
    # NOTE(review): `/10*10` truncates only under Python 2 integer
    # division; under Python 3 it is a no-op on floats -- confirm version.
    next = self._to_timestamp(start)/10*10+10
    # Bucket-aligned end of window, pulled back 180 seconds (unused while
    # the trailing fill loop below stays commented out).
    stop = self._to_timestamp(stop)/10*10-180
    now = 0
    prev = 0
    prev_t = next
    for i in data:
        # now = self._to_timestamp(i.timestamp)
        # while now > next:
        #     w = 1.*(now - next)/(now - prev_t)
        #     yield {
        #         "time": datetime.utcfromtimestamp(next).isoformat()+'.000Z',
        #         "count": int(prev * w + i.frequency * (1-w)),
        #     }
        #     next += 10
        yield {
            "time": i.timestamp.isoformat()+'.000Z',
            "count": int(i.frequency),
        }
        prev = i.frequency
        prev_t = now
        next = now+10
    # while next < stop:
    #     yield {
    #         "time": datetime.utcfromtimestamp(next).isoformat()+'.000Z',
    #         "count": 0,
    #     }
    #     next += 10
def test_can_insert_udts_with_all_datatypes(self):
    """
    Test for inserting all column types into a UserType
    test_can_insert_udts_with_all_datatypes tests that each cqlengine column type can be inserted into a UserType.
    It first creates a UserType that has each cqlengine column type, and a corresponding table/Model. It then creates
    a UserType instance where all the fields have corresponding data, and inserts the UserType as an instance of the Model.
    Finally, it verifies that each column read from the UserType from Cassandra is the same as the input parameters.
    @since 2.5.0
    @jira_ticket PYTHON-251
    @expected_result The UserType is inserted with each column type, and the resulting read yields proper data for each column.
    @test_category data_types:udt
    """
    sync_table(AllDatatypesModel)
    self.addCleanup(drop_table, AllDatatypesModel)
    # One value per cqlengine column type, in fields named 'a' through 'n'.
    input = AllDatatypes(a='ascii', b=2 ** 63 - 1, c=bytearray(b'hello world'), d=True,
                         e=datetime.utcfromtimestamp(872835240), f=Decimal('12.3E+7'), g=2.39,
                         h=3.4028234663852886e+38, i='123.123.123.123', j=2147483647, k='text',
                         l=UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'),
                         m=UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), n=int(str(2147483647) + '000'))
    AllDatatypesModel.create(id=0, data=input)
    self.assertEqual(1, AllDatatypesModel.objects.count())
    output = AllDatatypesModel.objects.first().data
    # Compare every field 'a'..'n' between what was written and read back.
    for i in range(ord('a'), ord('a') + 14):
        self.assertEqual(input[chr(i)], output[chr(i)])
def test_datetime_timestamp(self):
    """An integer epoch timestamp written to a datetime column reads back as a UTC datetime."""
    epoch_seconds = 1454520554
    self.DatetimeTest.objects.create(test_id=5, created_at=epoch_seconds)
    fetched = self.DatetimeTest.objects(test_id=5).first()
    self.assertEqual(fetched.created_at, datetime.utcfromtimestamp(epoch_seconds))