def delete_item(self, item):
    # Make sure the object can be indexed
    if not class_is_indexed(item.__class__):
        return

    # Get mapping
    mapping = self.mapping_class(item.__class__)

    # Delete document
    try:
        self.es.delete(
            self.name,
            mapping.get_document_type(),
            mapping.get_document_id(item),
        )
    except NotFoundError:
        pass  # Document doesn't exist, ignore this exception
Example source code for the Python class NotFoundError()
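Every snippet on this page follows the same idea: an Elasticsearch call that may target a missing document or index is wrapped in try/except NotFoundError, so that "already absent" is treated as success. A minimal, self-contained sketch of that pattern (the host, index name, and id are placeholders, not taken from any project below; older client versions may also require a doc_type argument):

from elasticsearch import Elasticsearch
from elasticsearch.exceptions import NotFoundError

es = Elasticsearch(hosts=['http://localhost:9200'])  # placeholder host

def delete_if_exists(index, doc_id):
    # Delete a document, treating "already gone" as success.
    try:
        es.delete(index=index, id=doc_id)
    except NotFoundError:
        pass  # the document (or the whole index) does not exist; nothing to do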
def setUp(self):
    # Clean index
    self.es = Elasticsearch(hosts=[LOCAL_ELASTICSEARCH])
    try:
        # Note: both deletes share one try block, so if 'datahub' is missing
        # the NotFoundError skips the 'events' delete as well.
        self.es.indices.delete(index='datahub')
        self.es.indices.delete(index='events')
    except NotFoundError:
        pass
    self.es.indices.create('datahub')
    mapping = {'dataset': {'properties': self.MAPPING}}
    self.es.indices.put_mapping(doc_type='dataset',
                                index='datahub',
                                body=mapping)
    self.es.indices.create('events')
    mapping = {'event': {'properties': {'timestamp': {'type': 'date'}}}}
    self.es.indices.put_mapping(doc_type='event',
                                index='events',
                                body=mapping)
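The put_mapping calls above use the doc_type parameter, which belongs to the pre-7.x client API. Purely as an illustration (not part of the original test), the 'events' mapping would look roughly like this with an Elasticsearch 7+ client, where mapping types were removed:

self.es.indices.create(index='events', body={
    'mappings': {
        'properties': {'timestamp': {'type': 'date'}}
    }
})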
Source file: test_elasticsearch_backend.py (project: elasticsearch2-haystack, author: NDevox)
def test_recreate_index(self):
    clear_elasticsearch_index()
    sb = connections['elasticsearch'].get_backend()
    sb.silently_fail = True
    sb.setup()
    original_mapping = self.raw_es.indices.get_mapping(index=sb.index_name)
    sb.clear()
    sb.setup()
    try:
        updated_mapping = self.raw_es.indices.get_mapping(sb.index_name)
    except elasticsearch.NotFoundError:
        self.fail("There is no mapping after recreating the index")
    self.assertEqual(original_mapping, updated_mapping,
                     "Mapping after recreating the index differs from the original one")
Source file: test_elasticsearch_driver.py (project: Image-search-engine, author: praveenKumar88)
def setup_index(request, index_name):
    es = Elasticsearch()
    try:
        es.indices.create(index=index_name)
    except RequestError as e:
        if e.error == u'index_already_exists_exception':
            es.indices.delete(index_name)
        else:
            raise

    def fin():
        try:
            es.indices.delete(index_name)
        except NotFoundError:
            pass
    request.addfinalizer(fin)
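In the source project this helper is presumably registered as a pytest fixture (request.addfinalizer only makes sense inside one). A rough sketch of how such a fixture is typically wired up; the decorator, the fixture name, and the test below are assumptions, not code from the project:

import pytest

@pytest.fixture
def index_name(request):
    name = 'test-index'          # hypothetical index name
    setup_index(request, name)   # create the index; the finalizer deletes it again
    return name

def test_index_exists(index_name):
    assert Elasticsearch().indices.exists(index=index_name)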
Source file: test_elasticsearch_backend.py (project: django-haystack-elasticsearch, author: CraveFood)
def test_recreate_index(self):
    clear_elasticsearch_index()
    sb = connections['default'].get_backend()
    sb.silently_fail = True
    sb.setup()
    original_mapping = self.raw_es.indices.get_mapping(index=sb.index_name)
    sb.clear()
    sb.setup()
    try:
        updated_mapping = self.raw_es.indices.get_mapping(sb.index_name)
    except elasticsearch.NotFoundError:
        self.fail("There is no mapping after recreating the index")
    self.assertEqual(original_mapping, updated_mapping,
                     "Mapping after recreating the index differs from the original one")
def delete(self):
    try:
        self.es.indices.delete(self.name)
    except NotFoundError:
        pass
def get_object(self, req, resp, path_params, for_update=False):
    try:
        obj = self.objects_class.get(*path_params, using=self.connection)
    except NotFoundError:
        raise HTTPNotFound()
    return obj
def get_host_by_id(self, host_id):
    search_kwargs = self.get_search_kwargs()
    search_kwargs['id'] = host_id
    del search_kwargs['size']
    try:
        es_result = self.es.get(**search_kwargs)
        return Host(host_id=host_id, host_data=es_result.get('_source', {}))
    except NotFoundError:
        return None
def test_error_is_properly_logged(connection, caplog, port, server):
    server.register_response('/i', status=404)
    with raises(NotFoundError):
        yield from connection.perform_request('GET', '/i', params={'some': 'data'})
    for logger, level, message in caplog.record_tuples:
        if logger == 'elasticsearch' and level == logging.WARNING:
            assert message.startswith('GET http://localhost:%s/i?some=data [status:404 request:' % port)
            break
    else:
        # for/else: this branch runs only if the loop finished without break,
        # i.e. no matching warning record was found.
        assert False, "Log not received"
def test_404_properly_raised(server, client):
    server.register_response('/i/t/42', status=404)
    with raises(NotFoundError):
        yield from client.get(index='i', doc_type='t', id=42)
Source file: test_elasticsearch_driver.py (project: Image-search-engine, author: praveenKumar88)
def cleanup_index(request, es, index_name):
    def fin():
        try:
            es.indices.delete(index_name)
        except NotFoundError:
            pass
    request.addfinalizer(fin)
def test_recreate_index(self):
    """
    django.core.exceptions.ImproperlyConfigured:
    Model '<class 'tests.test_app.models.MockModel'>' has more than one 'SearchIndex`` handling it.
    Please exclude either '<tests.test_app.search_indexes.ElasticsearchAutocompleteMockModelSearchIndex object at 0x10b7881c8>'
    or
    '<tests.test_app.search_indexes.ElasticsearchComplexFacetsMockSearchIndex object at 0x10b788228>'
    using the 'EXCLUDED_INDEXES' setting defined in 'settings.HAYSTACK_CONNECTIONS'.
    """
    clear_elasticsearch_index()
    search_backend = connections['default'].get_backend()
    search_backend.silently_fail = True
    search_backend.setup()
    original_mapping = self.raw_es.indices.get_mapping(index=search_backend.index_name)
    search_backend.clear()
    search_backend.setup()
    try:
        updated_mapping = self.raw_es.indices.get_mapping(search_backend.index_name)
    except elasticsearch.NotFoundError:
        self.fail("There is no mapping after recreating the index")
    self.assertEqual(original_mapping, updated_mapping,
                     "Mapping after recreating the index differs from the original one")
def delete_index(self):
    try:
        self.conn.indices.delete(index=self.index)
    except NotFoundError:
        pass
def user_entry(cnt, family_list, items, hof, family_id):
    user = User()
    user_json = {}
    try:
        obj = User.es.search(items['AADHAR_ID'])
        if not obj:
            push_user(cnt, family_list, items, user, user_json, hof, family_id)
            cnt += 1
    except NotFoundError:
        print('Index is not created')
    return cnt
def get_object_source(source_id, object_id, doc_type=u'items'):
    index_name = '%s_%s' % (current_app.config['DEFAULT_INDEX_PREFIX'],
                            source_id)
    try:
        obj = current_app.es.get(index=index_name, id=object_id,
                                 doc_type=doc_type,
                                 _source_include=['source_data'])
    except NotFoundError as e:
        if e.error.startswith('IndexMissingException'):
            message = 'Source \'%s\' does not exist' % source_id
        else:
            message = 'Document not found.'
        raise OcdApiError(message, 404)

    resp = current_app.make_response(obj['_source']['source_data']['data'])
    resp.mimetype = obj['_source']['source_data']['content_type']

    # Log a 'get_object_source' event if usage logging is enabled
    if current_app.config['USAGE_LOGGING_ENABLED']:
        tasks.log_event.delay(
            user_agent=request.user_agent.string,
            referer=request.headers.get('Referer', None),
            user_ip=request.remote_addr,
            created_at=datetime.utcnow(),
            event_type='get_object_source',
            source_id=source_id,
            doc_type=doc_type,
            object_id=object_id
        )

    return resp
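The IndexMissingException check above matches the error strings returned by very old Elasticsearch releases; from 2.x onward the same condition is reported as index_not_found_exception. A hedged variant of that branch for newer clients (an assumption, not code from the project):

    try:
        obj = current_app.es.get(index=index_name, id=object_id)
    except NotFoundError as e:
        # On Elasticsearch 2.x+ a missing index is reported as
        # 'index_not_found_exception' rather than 'IndexMissingException'.
        if 'index_not_found' in str(e.error):
            message = 'Source \'%s\' does not exist' % source_id
        else:
            message = 'Document not found.'
        raise OcdApiError(message, 404)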
def search_view(request, **kwargs):
    bucket_id = request.matchdict['bucket_id']
    collection_id = request.matchdict['collection_id']

    # Limit the number of results to return, based on existing Kinto settings.
    paginate_by = request.registry.settings.get("paginate_by")
    max_fetch_size = request.registry.settings["storage_max_fetch_size"]
    if paginate_by is None or paginate_by <= 0:
        paginate_by = max_fetch_size
    configured = min(paginate_by, max_fetch_size)
    # If the size is specified in query, ignore it if larger than setting.
    specified = None
    if "body" in kwargs:
        try:
            body = json.loads(kwargs["body"].decode("utf-8"))
            specified = body.get("size")
        except json.decoder.JSONDecodeError:
            pass
    if specified is None or specified > configured:
        kwargs.setdefault("size", configured)

    # Access indexer from views using registry.
    indexer = request.registry.indexer
    try:
        results = indexer.search(bucket_id, collection_id, **kwargs)

    except elasticsearch.NotFoundError as e:
        # If plugin was enabled after the creation of the collection.
        indexer.create_index(bucket_id, collection_id)
        results = indexer.search(bucket_id, collection_id, **kwargs)

    except elasticsearch.RequestError as e:
        # Malformed query.
        if isinstance(e.info["error"], dict):
            message = e.info["error"]["reason"]
            details = e.info["error"]["root_cause"][0]
        else:
            message = e.info["error"]
            details = None
        response = http_error(httpexceptions.HTTPBadRequest(),
                              errno=ERRORS.INVALID_PARAMETERS,
                              message=message,
                              details=details)
        raise response

    except elasticsearch.ElasticsearchException as e:
        # General failure.
        logger.exception("Index query failed.")
        results = {}

    return results
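The size-capping rule in this view can be read in isolation. A small restatement with illustrative values (this helper is not part of the plugin, only a sketch of the same logic):

def effective_size(requested, paginate_by=None, max_fetch_size=10000):
    # Mirror of the capping rule used in search_view above.
    if paginate_by is None or paginate_by <= 0:
        paginate_by = max_fetch_size
    configured = min(paginate_by, max_fetch_size)
    if requested is None or requested > configured:
        return configured
    return requested

assert effective_size(500, paginate_by=100) == 100   # capped by the setting
assert effective_size(50, paginate_by=100) == 50     # smaller request honored
assert effective_size(None) == 10000                 # no size given -> storage max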
def find_urls_by_selector(self, selector, use_soft=True):
    if not self.conn.indices.exists(index=self.index):
        self.create_index()
    or_query = [{'term': {'url': selector}}]
    for key in self.hard_selectors:
        or_query.append({'term': {key: selector}})
    if use_soft:
        for key in self.soft_selectors:
            or_query.append({'term': {key: selector}})
        logger.debug('including soft_selectors: %r', self.soft_selectors)
    query = {
        "query": {
            "bool": {
                "should": or_query,
            }
        }
    }
    # logger.debug(json.dumps(query, indent=4, sort_keys=True))
    try:
        res = self.conn.search(
            index=self.index, doc_type=RECORD_TYPE,
            _source_include=[], body=query)
        '''
        body={
            'query': {
                'multi_match': {
                    'query': selector,
                    'type': 'cross_fields',
                    # TODO: blend soft_selectors into this
                    'fields': self.hard_selectors,
                }
            }
        })
        '''
        visited_urls = set()
        for hit in res['hits']['hits']:
            # logger.debug(hit['_score'])
            url = hit['_id']
            if url not in visited_urls:
                visited_urls.add(url)
                yield url
    except NotFoundError as exc:
        logger.warn('akagraph indexes do not exist yet: %s', exc)
        return
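For a concrete sense of what this method builds: with a selector of 'alice@example.com' and hard_selectors containing just 'email' (both values made up for illustration), the body passed to conn.search would be:

{
    "query": {
        "bool": {
            "should": [
                {"term": {"url": "alice@example.com"}},
                {"term": {"email": "alice@example.com"}},
            ]
        }
    }
}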
def main(*args):
    """ Drops and rebuilds the elasticsearch index """
    skip_ids = set()
    if "reset" in args:
        documents.PhotoIndex.delete(ignore=404)
        documents.PhotoIndex.create(ignore=400)
    elif "repeat" in args:
        pass
        # es.Elasticsearch().search(index=app.config["ELASTICSEARCH_INDEX"], body={"query": {"match_all": []}})
    for photo in helpers.get_photo_batch_iterator():
        fields = {}
        if photo.deleted:
            try:
                documents.PhotoDocument.get(id=photo.id).delete()
            except es.NotFoundError as e:
                pass
        else:
            for field in list(iter(documents.PhotoDocument._doc_type.mapping)):
                if hasattr(photo, field):
                    fields[field] = getattr(photo, field)
            make, model = photo.make, photo.model
            if make is None:
                make = ""
            if model is None:
                model = ""
            combined = ""
            extract_brands = ("Nikon", "Canon", "Kodak", "Olympus", "Pentax", "Minolta", "Casio", "Fujifilm", "Sony")
            for i in extract_brands:
                if i.lower() in make.lower():
                    make = i
            if "hewlett" in make.lower():
                make = "HP"
            # if len(make) > 1:
            #     make = make[0].upper() + make[1:].lower()
            if model.lower().startswith(make.lower()):
                model = model[len(make):].strip()
            model = re.sub(u"zoom digital camera", "", model, flags=re.I).strip()
            model = re.sub(u"digital camera$", "", model, flags=re.I).strip()
            model = re.sub(u"digital$", "", model, flags=re.I).strip()
            combined = ("%s %s" % (make, model)).strip()
            fields["model"] = None if len(combined) < 1 else combined
            fields["model_ci"] = None if len(combined) < 1 else combined
            fields["lens_ci"] = photo.lens
            fields["photo_id"] = photo.id
            doc = documents.PhotoDocument(meta={'id': photo.id}, **fields)
            doc.save()
def get_object(source_id, object_id, doc_type=u'items'):
    index_name = '%s_%s' % (current_app.config['DEFAULT_INDEX_PREFIX'],
                            source_id)

    include_fields = [f.strip() for f in request.args.get('include_fields', '').split(',') if f.strip()]
    excluded_fields = validate_included_fields(
        include_fields=include_fields,
        excluded_fields=current_app.config['EXCLUDED_FIELDS_DEFAULT'],
        allowed_to_include=current_app.config['ALLOWED_INCLUDE_FIELDS_DEFAULT']
    )

    try:
        obj = current_app.es.get(index=index_name, id=object_id,
                                 doc_type=doc_type,
                                 _source_exclude=excluded_fields)
    except NotFoundError as e:
        if e.error.startswith('IndexMissingException'):
            message = 'Source \'%s\' does not exist' % source_id
        else:
            message = 'Document not found.'
        raise OcdApiError(message, 404)

    # Log a 'get_object' event if usage logging is enabled
    if current_app.config['USAGE_LOGGING_ENABLED']:
        tasks.log_event.delay(
            user_agent=request.user_agent.string,
            referer=request.headers.get('Referer', None),
            user_ip=request.remote_addr,
            created_at=datetime.utcnow(),
            event_type='get_object',
            source_id=source_id,
            doc_type=doc_type,
            object_id=object_id
        )

    for key in current_app.config['EXCLUDED_FIELDS_ALWAYS']:
        try:
            del obj['_source'][key]
        except KeyError as e:
            pass

    return jsonify(obj['_source'])