def display_imgur_client_information(self):
    """Open a dialog showing the user how many imgur API credits remain.

    Builds an ImgurClient from the (client_id, client_secret) pair stored in
    ``self.imgur_client``; if the pair is missing or invalid, shows the
    corresponding error dialog instead of the credits dialog.
    """
    if self.imgur_client[0] is not None and self.imgur_client[1] is not None:
        try:
            imgur_client = imgurpython.ImgurClient(self.imgur_client[0], self.imgur_client[1])
        except imgurpython.helpers.error.ImgurClientError:
            imgur_client = None
            Message.invalid_imgur_client(self)
    else:
        Message.no_imgur_client(self)
        imgur_client = None
    if imgur_client is not None:
        credits_dict = imgur_client.credits
        # 'UserReset' is a unix timestamp; render it as a local date/time.
        # (The original called the unbound date.strftime(datetime, fmt);
        # calling strftime on the datetime instance is equivalent and clearer.)
        reset_time = datetime.fromtimestamp(credits_dict['UserReset'])
        dialog_text = 'Application credit limit: %s\nApplication credits remaining: %s\n\nUser credit limit: %s' \
                      '\nUser credits remaining: %s\nTime user credits reset: %s' % \
                      (credits_dict['ClientLimit'], credits_dict['ClientRemaining'],
                       credits_dict['UserLimit'], credits_dict['UserRemaining'],
                       reset_time.strftime('%m-%d-%Y at %I:%M %p'))
        # The modal dialog's return value was bound to an unused local
        # ('reply'); it is not needed, so it is discarded.
        QtWidgets.QMessageBox.information(self, 'Imgur Credits', dialog_text, QtWidgets.QMessageBox.Ok)
Example sources for Python's strftime() method.
Source file: RedditDownloaderGUI.py, from project DownloaderForReddit (author: MalloyDelacroix).
def date_to_str(date):
    """Return *date* rendered as a 'YYYY-MM-DD' string."""
    return "{:%Y-%m-%d}".format(date)
def date_to_weekday(date):
    """Map a 'YYYY-MM-DD' date string to its full weekday name (e.g. 'Monday')."""
    parsed = time.strptime(date, "%Y-%m-%d")
    return time.strftime("%A", parsed)
def get_durations_from_waka(date, header):
    """Fetch the WakaTime durations payload for *date* and return the parsed JSON."""
    request_url = BASE_URL + date.strftime("%Y-%m-%d")
    return requests.get(request_url, headers=header).json()
def write_data_to_dataframe(df, start_date, end_date):
    """Append per-project daily WakaTime durations to *df* in place.

    If *df* already contains rows, resumes from the day after the last
    recorded date instead of *start_date*. One row per (date, project) pair
    is appended: [date string, project name, total duration].
    """
    last_df_index = len(df)
    if last_df_index > 0:
        # Resume from the day after the newest date already stored in df.
        start_date_str = df["date"].values[last_df_index - 1]
        start_date = datetime.strptime(start_date_str, "%Y-%m-%d").date()
        start_date = start_date + timedelta(days=1)
    # The auth header does not change per day, so build it once outside the loop
    # (the original rebuilt it on every iteration).
    header = prepare_request_header(str.encode(API_KEY))
    for d in date_range(start_date, end_date):
        response_json = get_durations_from_waka(d, header)
        try:
            data = response_json["data"]
            # Sum the durations per project for this day.
            data_dict = {}
            for duration_data in data:
                project_name = duration_data["project"]
                duration = duration_data["duration"]
                # dict.get with a default replaces the original per-item
                # try/except KeyError accumulation; behavior is identical.
                data_dict[project_name] = data_dict.get(project_name, 0) + duration
            for k, v in data_dict.items():
                new_row = [d.strftime("%Y-%m-%d"), k, v]
                df.loc[last_df_index] = new_row
                last_df_index += 1
        except KeyError as keyError:
            # "data" key missing: WakaTime's free tier only exposes a limited
            # history window, so older days cannot be fetched.
            print("[*] ERROR: for {0}, key error: {1}".format(d.strftime("%Y-%m-%d"), keyError))
            print("This means that you can't see this day's records because you've exceeded the free Waka limit")
            print("Run this script at least once in a week to get all the records!")
        print("Durations saved for: {0}".format(d.strftime("%Y-%m-%d")))
def get_durations_from_waka(date, header):
    """Return the decoded JSON durations response from WakaTime for *date*."""
    day_segment = date.strftime("%Y-%m-%d")
    response = requests.get(BASE_URL + day_segment, headers=header)
    return response.json()
def write_data_to_dataframe(df, start_date, end_date):
    """Fill *df* with per-project daily WakaTime duration rows.

    When *df* already holds rows, the start date is advanced to the day
    following the last recorded date.
    """
    row_index = len(df)
    if row_index > 0:
        # Continue from the day following the newest date already in df.
        newest = df["date"].values[row_index - 1]
        start_date = datetime.strptime(newest, "%Y-%m-%d").date() + timedelta(days=1)
    for day in date_range(start_date, end_date):
        response_json = get_durations_from_waka(day, prepare_request_header(str.encode(API_KEY)))
        day_label = day.strftime("%Y-%m-%d")
        try:
            entries = response_json["data"]
            # Accumulate total duration per project for this day.
            totals = {}
            for entry in entries:
                name = entry["project"]
                length = entry["duration"]
                try:
                    totals[name] += length
                except KeyError:
                    totals[name] = length
            for name, length in totals.items():
                df.loc[row_index] = [day_label, name, length]
                row_index += 1
        except KeyError as keyError:
            print("[*] ERROR: for {0}, key error: {1}".format(day_label, keyError))
            print("This means that you can't see this day's records because you've exceeded the free Waka limit")
            print("Run this script at least once in a week to get all the records!")
        print("Durations saved for: {0}".format(day_label))
def fmt_date(date):
    """Render *date* as 'YYYY-MM-DD, week N, Weekday, Month'."""
    week_number = date.isocalendar()[1]
    suffix = date.strftime("%A, %B")
    return "%s, week %d, %s" % (date.isoformat(), week_number, suffix)
def formatDate(date):
    """Return *date* formatted as 'MM/DD/YYYY'."""
    return "{:%m/%d/%Y}".format(date)
def _datetime_to_label(date):
return date.strftime('%Y-%m-%d')
def __str__(self):
    """Human-readable name: English title, else Greek title, else a date string."""
    if not self.title_en and not self.title_el:
        # NOTE(review): `date` here is the module/class-level name, not an
        # instance attribute -- calling the unbound `date.strftime('%x')`
        # with the format string as `self` would raise a TypeError at
        # runtime; presumably `self.date.strftime('%x')` was intended.
        # Confirm against the model definition before fixing.
        return date.strftime('%x')
    else:
        if self.title_en:
            return self.title_en
        else:
            return self.title_el
    return 'no_title'  # NOTE(review): unreachable -- every branch above returns.
def sshot(opts=None, upload=False):
    """Call sshot and upload screenshot. Returns the image name.

    Keyword arguments:
    opts -- list of extra options to send to sshot (default None, meaning none)
    upload -- boolean to decide whether to upload screenshot or not.
              (default False)

    Returns the generated image file name on success, False otherwise.
    """
    # Fix: the original used a mutable default argument (opts=[]), which is
    # shared across calls; None + local list is the safe equivalent.
    if opts is None:
        opts = []
    my_env = os.environ.copy()
    date = datetime.now()
    # NOTE: ':' in the file name is not portable to Windows file systems.
    imgname = "sshot-" + date.strftime('%Y-%m-%d_%H:%M:%S') + ".png"
    # The sshot script reads its behavior from environment variables.
    my_env['SSHOT_UPLOAD'] = 'true' if upload else 'false'
    my_env['IMGNAME'] = imgname
    command = ['sshot']
    command.extend(opts)
    proc = Popen(command, stdin=PIPE, stdout=PIPE, env=my_env)
    exit_code = proc.wait()
    if exit_code == 0:
        return imgname
    else:
        return False
def getCommonDayMsg(self,date):
    """Build the (send-time, content) tuple for an ordinary day.

    Weekend days get a 9:00 message; weekdays get a 7:00 message that
    includes the number of days left until the weekend.
    """
    weekend = [5,6]  # weekday() values for Saturday (5) and Sunday (6)
    # Day of week, 0 = Monday ... 6 = Sunday.
    # (Original comment here was mojibake and has been replaced.)
    dayOfWeek = date.weekday()
    # Weekend branch. (Original comment here was mojibake.)
    if dayOfWeek in weekend:
        # NOTE(review): the commas make `content` a tuple, not a concatenated
        # string -- string concatenation may have been intended; confirm with
        # callers. The '?' runs in the literals look like mojibake of the
        # original non-ASCII text (including inside the strftime format) and
        # are deliberately left untouched.
        content = "???",date.strftime('%b?%d?%?'),",",get_week_day(date),"?????????~"
        return (time(9, 0, 0),content)
    leftDay = 5 - dayOfWeek  # days remaining until Saturday
    content = "???",date.strftime('%b?%d?%?'),",",get_week_day(date),"??????",leftDay,"???????~"
    return (time(7, 0, 0),content)
#TODO
def fgm_hires(starttime, endtime):
    """
    Import high resolution fluxgate magnetometer data.

    Parameters
    ----------
    starttime : datetime
        Start of interval
    endtime : datetime
        End of interval

    Returns
    -------
    data : DataFrame
        Requested data
    """
    # NOTE(review): this aliases (does not copy) the module-level url_options
    # dict, so the FILE_NAME/FILE_PATH writes below mutate shared state.
    fgm_options = url_options
    readargs = {'names': ['year', 'doy', 'hour', 'minute', 'second',
                          'Bx', 'By', 'Bz', '|B|'],
                'delim_whitespace': True}
    data = []
    dtimes = helper._daysplitinterval(starttime, endtime)
    # Loop through years
    for dtime in dtimes:
        date = dtime[0]
        yearstr = date.strftime('%Y')
        # Remote file name pattern: 'U' + 2-digit year + day-of-year + 'SH.ASC'
        fgm_options['FILE_NAME'] = ('U' + yearstr[-2:] +
                                    date.strftime('%j') + 'SH.ASC')
        # Local locaiton to download to
        local_dir = os.path.join(ulysses_dir, 'fgm', 'hires', yearstr)
        local_file = os.path.join(local_dir, fgm_options['FILE_NAME'])
        local_hdf = local_file[:-4] + '.hdf'
        # If we have already saved a hdf file
        if os.path.exists(local_hdf):
            thisdata = pd.read_hdf(local_hdf)
        else:
            # Put together remote url
            fgm_options['FILE_PATH'] = '/ufa/HiRes/VHM-FGM/' + yearstr
            remote_url = ulysses_url
            for key in fgm_options:
                remote_url += key + '=' + fgm_options[key] + '&'
            f = helper.load(fgm_options['FILE_NAME'], local_dir, remote_url)
            # Read in data
            thisdata = pd.read_table(f, **readargs)
            # Process data/time
            thisdata = _convert_ulysses_time(thisdata)
            if use_hdf:
                # Cache the parsed table as hdf so future calls skip the parse.
                thisdata.to_hdf(local_hdf, 'fgm_hires')
        data.append(thisdata)
    return helper.timefilter(data, starttime, endtime)
def get_chart_and_text(event):
    """Compute stretch-goal chart data and stats for *event*, cached for one hour.

    Returns a dict with JSON-encoded chart series ('data'), current averages
    and totals ('avg_now'/'total_now'), per-goal requirements ('goals'),
    a significance flag, and the rendered public text.
    """
    cache = event.get_cache()
    cache_key = get_cache_key(event)
    chart_data = cache.get(cache_key)
    if chart_data:
        # Serve the cached result if one exists.
        return chart_data
    result = {}
    include_pending = event.settings.stretchgoals_include_pending or False
    avg_chart = event.settings.stretchgoals_chart_averages or False
    total_chart = event.settings.stretchgoals_chart_totals or False
    # Teach the settings store how to (de)serialize a QuerySet of Items as a
    # comma-separated list of primary keys.
    event.settings._h.add_type(
        QuerySet,
        lambda queryset: ','.join([str(element.pk) for element in queryset]),
        lambda pk_list: [Item.objects.get(pk=element) for element in pk_list.split(',') if element]
    )
    items = event.settings.get('stretchgoals_items', as_type=QuerySet) or []
    start_date = get_start_date(event, items, include_pending)
    end_date = get_end_date(event, items, include_pending)
    goals = get_goals(event)
    # Each series is a list of {date, price} points, or None when that chart
    # is disabled in settings.
    data = {
        'avg_data': {
            'data': [{
                'date': date.strftime('%Y-%m-%d'),
                'price': get_average_price(event, start_date, date, items, include_pending) or 0,
            } for date in get_date_range(start_date, end_date)] if avg_chart else None,
            'target': [goal.get('avg', 0) for goal in goals],
            'label': 'avg',
        },
        'total_data': {
            'data': [{
                'date': date.strftime('%Y-%m-%d'),
                'price': get_total_price(event, start_date, date, items, include_pending) or 0,
            } for date in get_date_range(start_date, end_date)] if total_chart else None,
            'target': [goal['total'] for goal in goals],
            'label': 'total',
        },
    }
    result['data'] = {key: json.dumps(value, cls=ChartJSONEncoder) for key, value in data.items()}
    try:
        # Latest point of each series gives the "current" value.
        result['avg_now'] = data['avg_data']['data'][-1]['price']
        result['total_now'] = data['total_data']['data'][-1]['price']
    except (TypeError, IndexError):  # no data, data[-1] does not exist
        result['avg_now'] = 0
        result['total_now'] = 0
    for goal in goals:
        goal['avg_required'] = get_required_average_price(event, items, include_pending, goal['total'], goal['amount'], result['total_now'])
        goal['total_left'] = goal['total'] - result['total_now']
    result['goals'] = goals
    # Only treat the stats as significant once the configured minimum number
    # of orders (if any) has been reached.
    result['significant'] = (
        not event.settings.stretchgoals_min_orders
        or get_base_queryset(event, items, include_pending).count() >= event.settings.get('stretchgoals_min_orders', as_type=int)
    )
    result['public_text'] = get_public_text(event, items, include_pending, data=result)
    cache.set(cache_key, result, timeout=3600)
    return result