async def http_handler(self, request):
    if request.path.endswith("api.sock"):
        return await self.ws_handler(request)
    if request.path.endswith("/monitor/"):
        data = pkgutil.get_data("rci.services.monitor",
                                "monitor.html").decode("utf8")
        return web.Response(text=data, content_type="text/html")
    if request.path.endswith("/login/github"):
        if request.method == "POST":
            url = self.oauth.generate_request_url(("read:org", ))
            return web.HTTPFound(url)
    if request.path.endswith("/oauth2/github"):
        return await self._oauth2_handler(request)
    if request.path.endswith("logout"):
        if request.method == "POST":
            sid = request.cookies.get(self.config["cookie_name"])
            del self.sessions[sid]
            return web.HTTPFound("/monitor/")
    return web.HTTPNotFound()
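# A sketch of how a path-dispatching handler like the one above might be mounted;
# `service` stands in for the object providing http_handler, and the catch-all
# wildcard route is an assumption, not taken from the source.
from aiohttp import web

def make_monitor_app(service):
    app = web.Application()
    # every method and path reaches http_handler, which dispatches on the path suffix
    app.router.add_route('*', '/{tail:.*}', service.http_handler)
    return app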
@asyncio.coroutine
def get_flag(base_url, cc):
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    resp = yield from aiohttp.request('GET', url)
    with contextlib.closing(resp):
        if resp.status == 200:
            image = yield from resp.read()
            return image
        elif resp.status == 404:
            raise web.HTTPNotFound()
        else:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
# BEGIN FLAGS2_ASYNCIO_EXECUTOR
@asyncio.coroutine
def download_one(cc, base_url, semaphore, verbose):
    try:
        with (yield from semaphore):
            image = yield from get_flag(base_url, cc)
    except web.HTTPNotFound:
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        loop = asyncio.get_event_loop()  # <1>
        loop.run_in_executor(None,       # <2>
                             save_flag, image, cc.lower() + '.gif')  # <3>
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)
# END FLAGS2_ASYNCIO_EXECUTOR
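# The coroutines above show the error path (web.HTTPNotFound -> not_found status)
# but not the driver that runs them. Below is a minimal sketch of such a driver,
# assuming download_one, FetchError and a Semaphore-based concurrency limit as in
# the snippets; the name download_many and the return_exceptions handling are
# illustrative, not taken from the source.
import asyncio

def download_many(cc_list, base_url, verbose, concur_req):
    loop = asyncio.get_event_loop()
    semaphore = asyncio.Semaphore(concur_req)   # bounds concurrent downloads
    to_do = [download_one(cc, base_url, semaphore, verbose)
             for cc in sorted(cc_list)]
    # return_exceptions=True keeps a single FetchError from cancelling the rest
    results = loop.run_until_complete(
        asyncio.gather(*to_do, return_exceptions=True))
    loop.close()
    return results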
@asyncio.coroutine
def http_get(url):
    res = yield from aiohttp.request('GET', url)
    if res.status == 200:
        ctype = res.headers.get('Content-type', '').lower()
        if 'json' in ctype or url.endswith('json'):
            data = yield from res.json()  # <1>
        else:
            data = yield from res.read()  # <2>
        return data
    elif res.status == 404:
        raise web.HTTPNotFound()
    else:
        raise aiohttp.errors.HttpProcessingError(
            code=res.status, message=res.reason,
            headers=res.headers)
@asyncio.coroutine
def download_one(cc, base_url, semaphore, verbose):
    try:
        with (yield from semaphore):  # <5>
            image = yield from get_flag(base_url, cc)
        with (yield from semaphore):
            country = yield from get_country(base_url, cc)
    except web.HTTPNotFound:
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        country = country.replace(' ', '_')
        filename = '{}-{}.gif'.format(country, cc)
        loop = asyncio.get_event_loop()
        loop.run_in_executor(None, save_flag, image, filename)
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)
# END FLAGS3_ASYNCIO
async def download_one(cc, base_url, semaphore, verbose):  # <3>
    try:
        with (await semaphore):  # <4>
            image = await get_flag(base_url, cc)  # <5>
    except web.HTTPNotFound:  # <6>
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc  # <7>
    else:
        save_flag(image, cc.lower() + '.gif')  # <8>
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)
# END FLAGS2_ASYNCIO_TOP
def register_in_memory_block_store_api(app, prefix='/blockstore'):
    # Really, really simple stuff ;-)
    blocks = {}

    async def api_block_get(request):
        id = request.match_info['id']
        try:
            return web.Response(body=blocks[id], content_type='application/octet-stream')
        except KeyError:
            raise web.HTTPNotFound()

    async def api_block_post(request):
        id = request.match_info['id']
        if id in blocks:
            raise web.HTTPConflict()
        blocks[id] = await request.read()
        return web.Response()

    app.router.add_get(prefix + '/{id}', api_block_get)
    app.router.add_post(prefix + '/{id}', api_block_post)
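# A minimal sketch of wiring the in-memory block store into a runnable aiohttp
# application; make_app and the port number are assumptions for illustration.
from aiohttp import web

def make_app():
    app = web.Application()
    register_in_memory_block_store_api(app, prefix='/blockstore')
    return app

if __name__ == '__main__':
    # GET  /blockstore/<id>  -> 200 with the block body, 404 if unknown
    # POST /blockstore/<id>  -> 200 on create, 409 if the id already exists
    web.run_app(make_app(), port=8080)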
async def delete_file(request: web.Request):
    filename = request.match_info.get('name').strip()
    filepath = os.path.join(config.args.storage, filename)

    if filename in replication.dellog:
        # We know this already
        raise web.HTTPNotFound()

    if not os.path.exists(filepath):
        if not request.headers['User-Agent'].startswith('cockatiel/'):
            logger.debug('File {} does not exist, but we will still propagate the deletion.'.format(filename))
            replication.dellog.put(filename)
            replication.queue_operation('DELETE', filename)
        raise web.HTTPNotFound()

    os.remove(filepath)
    # TODO: Clean up now-empty directories
    logger.debug('Deleted file {}, scheduling replication.'.format(filename))
    replication.dellog.put(filename)
    replication.queue_operation('DELETE', filename)
    return web.Response()
async def restart(request):
    prefix = request.match_info['prefix']
    try:
        repo = getattr(scopes, prefix).name
    except AttributeError:
        return web.HTTPNotFound()
    ref = request.match_info['ref']
    ref = Ref(repo, ref, '<sha>')
    targets = request.GET.get('t', '').split(',')
    all = request.GET.get('all')
    request.app.loop.create_task(ci(ref, targets, all))
    await asyncio.sleep(2)
    log_url = '%slatest/%s/' % (conf['log_url'], ref.uid)
    return web.HTTPFound(log_url)
async def call(self, request):
    msg_id = self.request.match_info['id']
    data = await self.query(message_id=msg_id)
    await self.insert_events(data)
    if len(data['hits']['hits']) == 0:
        raise HTTPNotFound(text='message not found')
    data = data['hits']['hits'][0]
    preview_path = self.app.router['user-preview'].url_for(**self.request.match_info)
    return dict(
        base_template='user/base-{}.jinja'.format('raw' if self.request.query.get('raw') else 'page'),
        title='{_type} - {_id}'.format(**data),
        id=data['_id'],
        method=data['_type'],
        details=self._details(data),
        events=list(self._events(data)),
        preview_url=self.full_url(f'{preview_path}?{self.request.query_string}'),
        attachments=list(self._attachments(data)),
    )
async def get_category(request):
    """
    Args:
        request: category_name is required
    Returns:
        the configuration items in the given category.
    :Example:
        curl -X GET http://localhost:8081/category/PURGE_READ
    """
    category_name = request.match_info.get('category_name', None)
    if not category_name:
        raise web.HTTPBadRequest(reason="Category Name is required")

    # TODO: make it optimized and elegant
    cf_mgr = ConfigurationManager(connect.get_storage())
    category = await cf_mgr.get_category_all_items(category_name)

    if category is None:
        raise web.HTTPNotFound(reason="No such Category Found for {}".format(category_name))

    return web.json_response(category)
async def get_category_item(request):
    """
    Args:
        request: category_name & config_item are required
    Returns:
        the configuration item in the given category.
    :Example:
        curl -X GET http://localhost:8081/foglamp/category/PURGE_READ/age
    """
    category_name = request.match_info.get('category_name', None)
    config_item = request.match_info.get('config_item', None)
    if not category_name or not config_item:
        raise web.HTTPBadRequest(reason="Both Category Name and Config items are required")

    # TODO: make it optimized and elegant
    cf_mgr = ConfigurationManager(connect.get_storage())
    category_item = await cf_mgr.get_category_item(category_name, config_item)

    if category_item is None:
        raise web.HTTPNotFound(reason="No Category Item Found")

    return web.json_response(category_item)
async def get_backups(request):
    """
    Returns a list of all backups
    :Example: curl -X GET http://localhost:8082/foglamp/backup
    :Example: curl -X GET http://localhost:8082/foglamp/backup?limit=2&skip=1&status=complete
    """
    try:
        limit = int(request.query['limit']) if 'limit' in request.query else None
        skip = int(request.query['skip']) if 'skip' in request.query else None
        status = request.query['status'] if 'status' in request.query else None
        # TODO : Fix after actual implementation
        Backup.get_backup_list.return_value = [{'id': 28, 'date': '2017-08-30 04:05:10.382', 'status': 'running'},
                                               {'id': 27, 'date': '2017-08-29 04:05:13.392', 'status': 'failed'},
                                               {'id': 26, 'date': '2017-08-28 04:05:08.201', 'status': 'complete'}]
        # backup_json = [{"id": b[0], "date": b[1], "status": b[2]}
        #                for b in Backup.get_backup_list(limit=limit, skip=skip, status=status)]
        backup_json = Backup.get_backup_list(limit=limit, skip=skip, status=status)
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='No backups found for queried parameters')
    return web.json_response({"backups": backup_json})
async def delete_backup(request):
    """
    Delete a backup
    :Example: curl -X DELETE http://localhost:8082/foglamp/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    if not backup_id:
        raise web.HTTPBadRequest(reason='Backup id is required')
    else:
        try:
            backup_id = int(backup_id)
        except ValueError:
            raise web.HTTPBadRequest(reason='Invalid backup id')
    try:
        # TODO : Fix after actual implementation
        Backup.delete_backup.return_value = "Backup deleted successfully"
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup with {} does not exist'.format(backup_id))
    _resp = Backup.delete_backup(id=backup_id)
    return web.json_response({'message': _resp})
async def restore_backup(request):
    """
    Restore from a backup
    :Example: curl -X PUT http://localhost:8082/foglamp/backup/1/restore
    """
    backup_id = request.match_info.get('backup_id', None)
    if not backup_id:
        raise web.HTTPBadRequest(reason='Backup id is required')
    else:
        try:
            backup_id = int(backup_id)
        except ValueError:
            raise web.HTTPBadRequest(reason='Invalid backup id')
    try:
        # TODO : Fix after actual implementation
        Backup.restore_backup.return_value = 1
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup with {} does not exist'.format(backup_id))
    try:
        Backup.restore_backup(id=backup_id)
        return web.json_response({'message': 'Restore backup with id {} started successfully'.format(backup_id)})
    except Backup.RestoreFailed as ex:
        return web.json_response({'error': 'Restore backup with id {} failed, reason {}'.format(backup_id, ex)})
async def get_scheduled_process(request):
    """
    Returns a list of all the defined scheduled_processes from scheduled_processes table
    """
    scheduled_process_name = request.match_info.get('scheduled_process_name', None)
    if not scheduled_process_name:
        raise web.HTTPBadRequest(reason='No Scheduled Process Name given')
    payload = PayloadBuilder().SELECT(("name")).WHERE(["name", "=", scheduled_process_name]).payload()
    _storage = connect.get_storage()
    scheduled_process = _storage.query_tbl_with_payload('scheduled_processes', payload)
    if len(scheduled_process['rows']) == 0:
        raise web.HTTPNotFound(reason='No such Scheduled Process: {}.'.format(scheduled_process_name))
    return web.json_response(scheduled_process['rows'][0].get("name"))
#################################
# Schedules
#################################
async def delete_schedule(request):
    """
    Delete a schedule from schedules table
    :Example: curl -X DELETE http://localhost:8082/foglamp/schedule/dc9bfc01-066a-4cc0-b068-9c35486db87f
    """
    try:
        schedule_id = request.match_info.get('schedule_id', None)
        if not schedule_id:
            raise web.HTTPBadRequest(reason='Schedule ID is required.')
        try:
            assert uuid.UUID(schedule_id)
        except ValueError as ex:
            raise web.HTTPNotFound(reason="Invalid Schedule ID {}".format(schedule_id))
        await server.Server.scheduler.delete_schedule(uuid.UUID(schedule_id))
        return web.json_response({'message': 'Schedule deleted successfully', 'id': schedule_id})
    except (ValueError, ScheduleNotFoundError) as ex:
        raise web.HTTPNotFound(reason=str(ex))
async def unregister(request):
    """ Deregister a service
    :Example: curl -X DELETE http://localhost:8082/foglamp/service/dc9bfc01-066a-4cc0-b068-9c35486db87f
    """
    try:
        service_id = request.match_info.get('service_id', None)
        if not service_id:
            raise web.HTTPBadRequest(reason='Service id is required')
        try:
            Service.Instances.get(idx=service_id)
        except Service.DoesNotExist:
            raise web.HTTPBadRequest(reason='Service with {} does not exist'.format(service_id))
        Service.Instances.unregister(service_id)
        _resp = {'id': str(service_id), 'message': 'Service unregistered'}
        return web.json_response(_resp)
    except ValueError as ex:
        raise web.HTTPNotFound(reason=str(ex))
async def error_middleware(app, handler):
    async def middleware_handler(request):
        if_trace = request.query.get('trace') if 'trace' in request.query and request.query.get('trace') == '1' else None
        try:
            response = await handler(request)
            if response.status == 404:
                return handle_api_exception({"code": response.status, "message": response.reason},
                                            response.__class__.__name__, if_trace)
            return response
        except (web.HTTPNotFound, web.HTTPBadRequest) as ex:
            return handle_api_exception({"code": ex.status_code, "message": ex.reason}, ex.__class__.__name__, if_trace)
        except web.HTTPException as ex:
            raise
        # The Exception clause below must come last, as Exception is the superclass of all exceptions
        except Exception as ex:
            return handle_api_exception(ex, ex.__class__.__name__, if_trace)
    return middleware_handler
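# A minimal sketch of installing an old-style middleware factory like the one above;
# the ping handler and port are assumptions used only to show a 404 being converted,
# and handle_api_exception is assumed to return a web.Response.
from aiohttp import web

app = web.Application(middlewares=[error_middleware])

async def ping(request):
    raise web.HTTPNotFound(reason='no such resource')  # turned into an API error payload

app.router.add_get('/ping', ping)
web.run_app(app, port=8081)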
async def http_handler(self, request):
    path = request.path.split("/")[2]
    logging.info("%s %s", request, path)
    handler = getattr(self, "_http_%s" % path, None)
    if handler:
        return await handler(request)
    return web.HTTPNotFound()
async def handle_wsdl_request(self, req):
    ctx = AioMethodContext(self, req, 'text/xml; charset=utf-8',
                           aiohttp_app=self._aiohttp_app)

    if self.doc.wsdl11 is None:
        raise web.HTTPNotFound(headers=ctx.transport.resp_headers)

    if self._wsdl is None:
        self._wsdl = self.doc.wsdl11.get_interface_document()

    ctx.transport.wsdl = self._wsdl

    if ctx.transport.wsdl is None:
        with self._mtx_build_interface_document:
            try:
                ctx.transport.wsdl = self._wsdl
                if ctx.transport.wsdl is None:
                    actual_url = urlunparse([req.scheme, req.host, req.path, '', '', ''])
                    self.doc.wsdl11.build_interface_document(actual_url)
                    ctx.transport.wsdl = self._wsdl = self.doc.wsdl11.get_interface_document()
            except Exception as e:
                logger.exception(e)
                ctx.transport.wsdl_error = e
                self.event_manager.fire_event('wsdl_exception', ctx)
                raise web.HTTPInternalServerError(headers=ctx.transport.resp_headers)

    self.event_manager.fire_event('wsdl', ctx)
    ctx.transport.resp_headers['Content-Length'] = str(len(ctx.transport.wsdl))
    ctx.close()

    return await self.make_streaming_response(
        req=req,
        code=200,
        headers=ctx.transport.resp_headers,
        content=[ctx.transport.wsdl])
async def download_file(self, request):
    file_id = request.match_info['file_id']
    record = await db.tracks.find_one({"file_id": file_id})
    if not record:
        return web.HTTPNotFound()

    file = await self.bot.get_file(file_id)
    file_path = file["file_path"]
    range = request.headers.get("range")
    copy_headers = ["content-length", "content-range", "etag", "last-modified"]

    async with self.bot.download_file(file_path, range) as r:
        # Prepare headers
        resp = web.StreamResponse(status=r.status)
        resp.content_type = record["mime_type"]
        for h in copy_headers:
            val = r.headers.get(h)
            if val:
                resp.headers[h] = val
        await resp.prepare(request)

        # Send content
        while True:
            chunk = await r.content.read(chunk_size)
            if not chunk:
                break
            await resp.write(chunk)
        return resp
@asyncio.coroutine
def get_flag(base_url, cc):  # <2>
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    resp = yield from aiohttp.request('GET', url)
    with contextlib.closing(resp):
        if resp.status == 200:
            image = yield from resp.read()
            return image
        elif resp.status == 404:
            raise web.HTTPNotFound()
        else:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
async def get_flag(base_url, cc):  # <2>
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    with closing(await aiohttp.request('GET', url)) as resp:
        if resp.status == 200:
            image = await resp.read()
            return image
        elif resp.status == 404:
            raise web.HTTPNotFound()
        else:
            raise aiohttp.HttpProcessingError(
                code=resp.status, message=resp.reason,
                headers=resp.headers)
def register_start_api(app, dispatcher, route='/start'):

    async def api_cipherkey_get(request):
        eff = Effect(EPrivKeyGet(request.match_info['hash']))
        try:
            key = await asyncio_perform(dispatcher, eff)
        except PrivKeyNotFound:
            raise web.HTTPNotFound(text='Unknown hash')
        return web.Response(body=key, content_type='application/octet-stream')

    async def api_cipherkey_post(request):
        cipherkey = await request.read()
        hash = request.match_info['hash']
        eff = Effect(EPrivKeyAdd(hash, cipherkey))
        try:
            await asyncio_perform(dispatcher, eff)
        except PrivKeyHashCollision:
            raise web.HTTPConflict(text='This hash already exists...')
        logger.info('New cipherkey `%s` registered' % hash)
        return web.Response()

    async def api_pubkey_post(request):
        pubkey = await request.read()
        # TODO: should provide a token to avoid impersonation
        identity = request.match_info['identity']
        eff = Effect(EPubKeyAdd(identity, pubkey))
        try:
            await asyncio_perform(dispatcher, eff)
        except ParsecError as exc:
            return web.HTTPConflict(text=exc.label)
        logger.info('New identity `%s` registered' % identity)
        return web.Response()

    app.router.add_get(route + '/cipherkey/{hash}', api_cipherkey_get)
    app.router.add_post(route + '/cipherkey/{hash}', api_cipherkey_post)
    app.router.add_post(route + '/pubkey/{identity}', api_pubkey_post)
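# Hypothetical client-side call for the cipherkey route registered above, using
# aiohttp's client API; the base URL and the hash value are illustrative only.
import asyncio
import aiohttp

async def fetch_cipherkey(base_url, hash_):
    async with aiohttp.ClientSession() as session:
        async with session.get('{}/start/cipherkey/{}'.format(base_url, hash_)) as resp:
            if resp.status == 404:           # server raised web.HTTPNotFound: unknown hash
                raise KeyError(hash_)
            resp.raise_for_status()
            return await resp.read()

asyncio.get_event_loop().run_until_complete(
    fetch_cipherkey('http://localhost:8080', 'deadbeef'))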