def cli(ctx, **opts):
    ctx.obj = opts
    es_opts = {x: y for x, y in opts.items()
               if x in ('use_ssl', 'ca_certs', 'verify_certs', 'http_auth')}
    ctx.obj['es_conn'] = Elasticsearch(opts['es_host'], **es_opts)
    if opts['delete']:
        try:
            ctx.obj['es_conn'].indices.delete(opts['index'])
            log('info', 'Index %s deleted' % opts['index'])
        except NotFoundError:
            log('info', 'Skipping index deletion')
    if opts['index_settings_file']:
        if ctx.obj['es_conn'].indices.exists(index=opts['index']):
            ctx.obj['es_conn'].indices.put_settings(index=opts['index'], body=opts['index_settings_file'].read())
        else:
            ctx.obj['es_conn'].indices.create(index=opts['index'], body=opts['index_settings_file'].read())
    if ctx.invoked_subcommand is None:
        commands = cli.commands.keys()
        if ctx.default_map:
            default_command = ctx.default_map.get('default_command')
            if default_command:
                command = cli.get_command(ctx, default_command)
                if command:
                    ctx.invoke(command, **ctx.default_map[default_command]['arguments'])
                    return
                else:
                    ctx.fail('Cannot find default_command: {},\navailable commands are: {}'.format(default_command, ", ".join(commands)))
            else:
                ctx.fail('No subcommand specified via command line / task file,\navailable commands are: {}'.format(", ".join(commands)))
        else:
            ctx.fail('No subcommand specified via command line / task file,\navailable commands are: {}'.format(", ".join(commands)))
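For context, `cli` reads like the root command of a Click group that is allowed to run without a subcommand (otherwise `ctx.invoked_subcommand is None` could never be reached). A minimal sketch of how such a group could be declared; the option names below are assumptions for illustration, not taken from the original project:

import click
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import NotFoundError

# invoke_without_command=True lets the group body run even when no
# subcommand is given, which is what enables the default_command logic.
@click.group(invoke_without_command=True)
@click.option('--es-host', default='http://localhost:9200')
@click.option('--index', default='my-index')
@click.option('--delete', is_flag=True, default=False)
@click.option('--index-settings-file', type=click.File('rb'), default=None)
@click.option('--use-ssl', is_flag=True, default=False)
@click.pass_context
def cli(ctx, **opts):
    ...  # body as above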
Example source code for the Python NotFoundError() class
def setup(self):
    """
    Defers loading until needed.
    """
    # Get the existing mapping & cache it. We'll compare it
    # during the ``update`` & if it doesn't match, we'll put the new
    # mapping.
    try:
        self.existing_mapping = self.conn.indices.get_mapping(index=self.index_name)
    except NotFoundError:
        pass
    except Exception:
        if not self.silently_fail:
            raise

    unified_index = haystack.connections[self.connection_alias].get_unified_index()
    self.content_field_name, field_mapping = self.build_schema(unified_index.all_searchfields())

    current_mapping = {
        'modelresult': {
            'properties': field_mapping,
        }
    }
    # ES 1.x/2.x compatibility: the `_boost` mapping option was removed in
    # 2.0, so only include it for 1.x clients.
    if elasticsearch.VERSION < (2, 0, 0):
        current_mapping['modelresult']['_boost'] = {
            'name': 'boost',
            'null_value': 1.0
        }

    if current_mapping != self.existing_mapping:
        try:
            # Make sure the index is there first.
            self.conn.indices.create(index=self.index_name, body=self.DEFAULT_SETTINGS, ignore=400)
            self.conn.indices.put_mapping(index=self.index_name, doc_type='modelresult', body=current_mapping)
            self.existing_mapping = current_mapping
        except Exception:
            if not self.silently_fail:
                raise

    self.setup_complete = True
def setup(self):
    """
    Defers loading until needed.
    """
    # Get the existing mapping & cache it. We'll compare it
    # during the ``update`` & if it doesn't match, we'll put the new
    # mapping.
    try:
        self.existing_mapping = self.conn.indices.get_mapping(index=self.index_name)
    except NotFoundError:
        pass
    except Exception:
        if not self.silently_fail:
            raise

    unified_index = haystack.connections[self.connection_alias].get_unified_index()
    self.content_field_name, field_mapping = self.build_schema(unified_index.all_searchfields())
    current_mapping = {
        'modelresult': {
            'properties': field_mapping,
        }
    }

    if current_mapping != self.existing_mapping:
        try:
            # Make sure the index is there first.
            self.conn.indices.create(index=self.index_name, body=self.DEFAULT_SETTINGS, ignore=400)
            self.conn.indices.put_mapping(index=self.index_name, doc_type='modelresult', body=current_mapping)
            self.existing_mapping = current_mapping
        except Exception:
            if not self.silently_fail:
                raise

    self.setup_complete = True
def delete(uuid: str, replica: str):
    authenticated_user_email = request.token_info['email']

    es_client = ElasticsearchClient.get(logger)
    try:
        response = es_client.get(index=Config.get_es_index_name(ESIndexType.subscriptions, Replica[replica]),
                                 doc_type=ESDocType.subscription.name,
                                 id=uuid)
    except NotFoundError:
        raise DSSException(requests.codes.not_found, "not_found", "Cannot find subscription!")

    stored_metadata = response['_source']
    if stored_metadata['owner'] != authenticated_user_email:
        # common_error_handler defaults code to capitalized 'Forbidden' for Werkzeug exceptions. Keeping consistent.
        raise DSSException(requests.codes.forbidden, "Forbidden", "Your credentials can't access this subscription!")

    # Get all indexes that use the current alias.
    alias_name = Config.get_es_alias_name(ESIndexType.docs, Replica[replica])
    doc_indexes = _get_indexes_by_alias(es_client, alias_name)
    _unregister_percolate(es_client, doc_indexes, uuid)

    es_client.delete(index=Config.get_es_index_name(ESIndexType.subscriptions, Replica[replica]),
                     doc_type=ESDocType.subscription.name,
                     id=uuid)

    timestamp = datetime.datetime.utcnow()
    time_deleted = timestamp.strftime("%Y-%m-%dT%H%M%S.%fZ")
    return jsonify({'timeDeleted': time_deleted}), requests.codes.okay
def _get_indexes_by_alias(es_client: Elasticsearch, alias_name: str):
    try:
        return list(es_client.indices.get_alias(alias_name).keys())
    except NotFoundError:
        return []
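A hypothetical call (the alias name is illustrative): this returns the concrete indexes behind an alias, or an empty list when the alias does not exist yet:

doc_indexes = _get_indexes_by_alias(es_client, 'dss-docs')  # e.g. ['dss-docs-000001']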
def update_nr_replicas(es, max_retry, nr_replicas, index):
    for i in range(max_retry, 0, -1):
        try:
            es.indices.put_settings(
                body={"index": {"number_of_replicas": int(nr_replicas)}},
                index=index)
            log.info("Updating replicas done")
            return
        except (ConnectionError, NotFoundError):
            log.warning(
                "Updating replicas failed. Waiting for {} sec".format(i))
            time.sleep(i)
    log.error("Updating replicas definitely failed")
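A hypothetical usage sketch, e.g. before a bulk load: drop an index to zero replicas, retrying up to five times. Note that the wait shrinks rather than grows, since the loop counts `i` down from `max_retry` and sleeps `i` seconds after each failure:

update_nr_replicas(es, max_retry=5, nr_replicas=0, index='app-logs-2017.01')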
def get_story(owner_id, story_id, story_type=None, expand=True):
    """Fetch a single story given its story_id."""
    assert story_id
    story = get_stories(owner_id=owner_id, stories=story_id,
                        story_type=story_type, expand=expand)
    if not story:
        msg = 'Story %s' % story_id
        if story_type:
            msg += ' [%s]' % story_type
        raise NotFoundError(msg)
    if len(story) > 1:
        log.error('Found multiple stories with story_id %s', story_id)
    return story[0]
def delete_story(owner_id, story_id):
    """Delete a story."""
    index = 'app-logs-*'
    query = {
        'query': {
            'bool': {
                'filter': {
                    'bool': {
                        'must': [
                            {'term': {'owner_id': owner_id}},
                            {'term': {'story_id': story_id}},
                        ]
                    }
                }
            }
        }
    }
    # Delete all documents matching the above query.
    result = es().delete_by_query(index=index, body=query, conflicts='proceed')
    if not result['deleted']:
        raise NotFoundError('story_id %s' % story_id)
    # Report results.
    msg = 'Deleted %s log(s) with story_id %s' % (result['deleted'], story_id)
    if result['version_conflicts']:
        msg += ' Counted %s version_conflicts' % result['version_conflicts']
    if result['failures']:
        msg += ' Finished with failures: %s' % result['failures']
    log.warning(msg)
def test_remove_invalid_pk(self):
    """Test removing an object that is not in the index.

    Removing an object that is not in the index should raise a
    NotFoundError.
    """
    thread = create_thread()
    self.backend.add(thread)
    self.backend.remove(thread)
    # Try removing it again after it's already been removed.
    with self.assertRaises(NotFoundError):
        self.backend.remove(thread)
def test_annotation_increments_stat_if_get_raises_not_found_error(self,
                                                                  statsd):
    request = mock_request()
    request.es.get.side_effect = es_exceptions.NotFoundError

    try:
        views.AnnotationController(request).annotation()
    except Exception:
        pass

    statsd.incr.assert_called_once_with(
        "views.annotation.404.annotation_not_found")
def test_annotation_raises_http_not_found_if_get_raises_not_found(self):
    request = mock_request()
    request.es.get.side_effect = es_exceptions.NotFoundError

    with pytest.raises(httpexceptions.HTTPNotFound):
        views.AnnotationController(request).annotation()
def es_search(es, *args, **kwargs):
    try:
        results = es.search(request_timeout=10, **kwargs)
    except NotFoundError:
        logger.debug('Index not found: args: {}, kwargs: {}'.format(args, kwargs))
        return
    return results
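Because `es_search` returns `None` on a missing index instead of raising, callers need a guard. A minimal usage sketch, assuming an `es` client and an illustrative index pattern:

results = es_search(es, index='app-logs-*', body={'query': {'match_all': {}}})
if results is not None:
    print(results['hits']['total'])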
def query(kind, userid, size=50, **kw):
    kind_params = ENABLED_SEARCHES.get(kind)
    try:
        # Arguments received from a network request come in kw, as a mapping
        # between param_name and a list of received values.
        # If size was provided by the user, it will be a list, so we take its
        # first item.
        if isinstance(size, list):
            size = size[0]
        if int(size) > 100:
            size = 100
        from_ = int(kw.pop('from', [0])[0])
        api_params = dict([
            ('index', kind_params['index']),
            ('doc_type', kind_params['doc_type']),
            ('size', size),
            ('from_', from_)
        ])
        body = build_dsl(kind_params, userid, kw)
        api_params['body'] = json.dumps(body)
        ret = _get_engine().search(**api_params)
        logging.info('Performing query %r', kind_params)
        logging.info('api_params %r', api_params)
        logging.info('ret %r', ret)
        if ret.get('hits') is not None:
            results = [hit['_source'] for hit in ret['hits']['hits']]
            total = ret['hits']['total']
            # Guard against a missing aggregations section instead of
            # subscripting the result of .get(), which may be None.
            total_bytes = ret.get('aggregations', {}).get('total_bytes', {}).get('value', 0)
        else:
            results = []
            total = 0
            total_bytes = 0
        return {
            'results': results,
            'summary': {
                "total": total,
                "totalBytes": total_bytes
            }
        }
    except (NotFoundError, json.decoder.JSONDecodeError, ValueError) as e:
        logging.error("query: %r" % e)
        return {
            'results': [],
            'summary': {
                "total": 0,
                "totalBytes": 0
            },
            'error': str(e)
        }
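`build_dsl` is not shown in this snippet. Whatever it returns must define a `total_bytes` aggregation, since the success path reads it out of `ret['aggregations']`. A hypothetical sketch under that assumption (the field names are illustrative, not the original implementation):

def build_dsl(kind_params, userid, kw):
    # Hypothetical sketch: restrict results to the requesting user and sum a
    # byte-size field into the `total_bytes` aggregation that query()
    # expects to find in the response.
    return {
        'query': {'bool': {'filter': [{'term': {'userid': userid}}]}},
        'aggs': {'total_bytes': {'sum': {'field': 'size'}}},
    }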
def generate_index(self, indexName):
    """Generates the index on Elasticsearch.

    This method is intended to be used internally. It creates an index
    using certain parameters to achieve better search performance.

    :param str indexName: Name of the new index
    """
    body = {
        'mappings': {
            self.type: {
                'properties': {},
                'dynamic': True
            }
        },
        'settings': {
            'analysis': {
                'analyzer': {
                    'my_custom_analyzer': {
                        'type': 'custom',
                        'tokenizer': 'standard',
                        'filter': ['lowercase', 'my_ascii_folding']
                    }
                },
                'filter': {
                    'my_ascii_folding': {
                        'type': 'asciifolding',
                        'preserve_original': True
                    }
                }
            }
        }
    }
    suggest_field = {
        'type': 'completion',
        'analyzer': 'my_custom_analyzer',
        'search_analyzer': 'standard',
        'preserve_separators': False,
        'preserve_position_increments': False
    }
    body['mappings'][self.type]['properties'] = {
        'entity_id': {'type': 'string'},
        'entity_uri': {'type': 'string'},
        'description': {'type': 'object'},
        'label': {'type': 'object'},
        'alt_label': {'type': 'object'},
        'label_suggest': suggest_field
    }
    # Recreate the index from scratch: drop it if present, then create it.
    try:
        self.es.indices.delete(index=indexName)
    except es_exceptions.NotFoundError:
        pass
    self.es.indices.create(index=indexName, body=body)
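The `label_suggest` completion field defined above is what powers type-ahead lookups. A hedged sketch of querying it through the ES 1.x/2.x suggest API (consistent with the `string`-type mappings used here); the suggestion name and input text are illustrative:

response = self.es.suggest(index=indexName, body={
    'label-suggest': {
        'text': 'barac',
        'completion': {'field': 'label_suggest'}
    }
})
options = response['label-suggest'][0]['options']  # ranked completions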